// Copyright 2015-2022 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_STRUCTS_HPP
#define VULKAN_STRUCTS_HPP

namespace VULKAN_HPP_NAMESPACE
{
  //===============
  //=== STRUCTS ===
  //===============

  struct AabbPositionsKHR
  {
    using NativeType = VkAabbPositionsKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
      : minX( minX_ )
      , minY( minY_ )
      , minZ( minZ_ )
      , maxX( maxX_ )
      , maxY( maxY_ )
      , maxZ( maxZ_ )
    {
    }

    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT { minX = minX_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT { minY = minY_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT { minZ = minZ_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT { maxX = maxX_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT { maxY = maxY_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT { maxZ = maxZ_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkAabbPositionsKHR *>( this ); }
    explicit operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkAabbPositionsKHR *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
#  endif
    }

    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    float minX = {};
    float minY = {};
    float minZ = {};
    float maxX = {};
    float maxY = {};
    float maxZ = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value,
                            "AabbPositionsKHR is not nothrow_move_constructible!" );

  using AabbPositionsNV = AabbPositionsKHR;
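  // Illustrative usage sketch (not part of the generated API): the wrapper can be filled via the
  // constructor or the chaining setters, and it converts bit-for-bit to the C struct. The bounds
  // below are arbitrary example values.
  //
  //   VULKAN_HPP_NAMESPACE::AabbPositionsKHR aabb =
  //     VULKAN_HPP_NAMESPACE::AabbPositionsKHR{}.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f ).setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
  //   VkAabbPositionsKHR const & native = static_cast<VkAabbPositionsKHR const &>( aabb );  // explicit conversion to the C type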
  union DeviceOrHostAddressConstKHR
  {
    using NativeType = VkDeviceOrHostAddressConstKHR;

#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkDeviceOrHostAddressConstKHR const &() const { return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR *>( this ); }
    operator VkDeviceOrHostAddressConstKHR &() { return *reinterpret_cast<VkDeviceOrHostAddressConstKHR *>( this ); }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
    const void *                        hostAddress;
#else
    VkDeviceAddress deviceAddress;
    const void *    hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

  struct AccelerationStructureGeometryTrianglesDataKHR
  {
    using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR(
      VULKAN_HPP_NAMESPACE::Format                      vertexFormat_  = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_    = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                  vertexStride_  = {},
      uint32_t                                          maxVertex_     = {},
      VULKAN_HPP_NAMESPACE::IndexType                   indexType_     = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_     = {},
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {},
      const void *                                      pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , vertexFormat( vertexFormat_ )
      , vertexData( vertexData_ )
      , vertexStride( vertexStride_ )
      , maxVertex( maxVertex_ )
      , indexType( indexType_ )
      , indexData( indexData_ )
      , transformData( transformData_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &
      setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexStride(VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_) VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &setMaxVertex(uint32_t maxVertex_) VULKAN_HPP_NOEXCEPT { maxVertex = maxVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR &setIndexType(VULKAN_HPP_NAMESPACE::IndexType indexType_) VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexData(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &indexData_) VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setTransformData(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &transformData_) VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; uint32_t maxVertex = {}; VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR) == sizeof(VkAccelerationStructureGeometryTrianglesDataKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureGeometryTrianglesDataKHR; }; struct AccelerationStructureGeometryAabbsDataKHR { using NativeType = VkAccelerationStructureGeometryAabbsDataKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), data(data_), stride(stride_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR(AccelerationStructureGeometryAabbsDataKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; 
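    // Usage sketch (illustrative, not generated documentation): `data` points at an array of
    // VkAabbPositionsKHR entries and `stride` is the byte distance between consecutive entries,
    // e.g. for host-resident AABB data:
    //
    //   std::vector<VULKAN_HPP_NAMESPACE::AabbPositionsKHR> aabbs;  // filled elsewhere
    //   auto aabbsData = VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR{}
    //                      .setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR( aabbs.data() ) )
    //                      .setStride( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) );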
AccelerationStructureGeometryAabbsDataKHR(VkAccelerationStructureGeometryAabbsDataKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryAabbsDataKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryAabbsDataKHR &operator=(AccelerationStructureGeometryAabbsDataKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryAabbsDataKHR &operator=(VkAccelerationStructureGeometryAabbsDataKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setData(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &data_) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR &setStride(VULKAN_HPP_NAMESPACE::DeviceSize stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, data, stride); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR) == sizeof(VkAccelerationStructureGeometryAabbsDataKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureGeometryAabbsDataKHR; }; struct AccelerationStructureGeometryInstancesDataKHR { using NativeType = VkAccelerationStructureGeometryInstancesDataKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), arrayOfPointers(arrayOfPointers_), data(data_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR(AccelerationStructureGeometryInstancesDataKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryInstancesDataKHR(VkAccelerationStructureGeometryInstancesDataKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryInstancesDataKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryInstancesDataKHR &operator=(AccelerationStructureGeometryInstancesDataKHR const &rhs) 
VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryInstancesDataKHR &operator=(VkAccelerationStructureGeometryInstancesDataKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_) VULKAN_HPP_NOEXCEPT { arrayOfPointers = arrayOfPointers_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setData(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &data_) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, arrayOfPointers, data); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR) == sizeof(VkAccelerationStructureGeometryInstancesDataKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureGeometryInstancesDataKHR; }; union AccelerationStructureGeometryDataKHR { using NativeType = VkAccelerationStructureGeometryDataKHR; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {}) : triangles(triangles_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_) : aabbs(aabbs_) {} VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_) : instances(instances_) { } #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const &triangles_) VULKAN_HPP_NOEXCEPT { triangles = triangles_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setAabbs(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const &aabbs_) VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const &instances_) VULKAN_HPP_NOEXCEPT { instances = instances_; return *this; } #endif 
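  // Usage note (illustrative, not generated documentation): as with the underlying C union,
  // only the member matching the geometryType of the enclosing AccelerationStructureGeometryKHR
  // may be read, e.g.:
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometryData{};
  //   geometryData.setTriangles( triangles );  // `triangles` is an AccelerationStructureGeometryTrianglesDataKHR built elsewhere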
/*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkAccelerationStructureGeometryDataKHR const &() const { return *reinterpret_cast(this); } operator VkAccelerationStructureGeometryDataKHR &() { return *reinterpret_cast(this); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; #else VkAccelerationStructureGeometryTrianglesDataKHR triangles; VkAccelerationStructureGeometryAabbsDataKHR aabbs; VkAccelerationStructureGeometryInstancesDataKHR instances; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct AccelerationStructureGeometryKHR { using NativeType = VkAccelerationStructureGeometryKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), geometryType(geometryType_), geometry(geometry_), flags(flags_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR(AccelerationStructureGeometryKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryKHR(VkAccelerationStructureGeometryKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryKHR &operator=(AccelerationStructureGeometryKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryKHR &operator=(VkAccelerationStructureGeometryKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR &setGeometryType(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometry(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const &geometry_) VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR &setFlags(VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, geometryType, geometry, flags); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; 
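    // Note (illustrative, not generated documentation): the active member of the `geometry`
    // union below must correspond to `geometryType`, e.g. GeometryTypeKHR::eTriangles pairs
    // with geometry.triangles and GeometryTypeKHR::eAabbs with geometry.aabbs.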
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR) == sizeof(VkAccelerationStructureGeometryKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureGeometryKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureGeometryKHR; }; union DeviceOrHostAddressKHR { using NativeType = VkDeviceOrHostAddressKHR; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) : deviceAddress(deviceAddress_) {} VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR(void *hostAddress_) : hostAddress(hostAddress_) {} #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR &setDeviceAddress(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR &setHostAddress(void *hostAddress_) VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } #endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkDeviceOrHostAddressKHR const &() const { return *reinterpret_cast(this); } operator VkDeviceOrHostAddressKHR &() { return *reinterpret_cast(this); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; void *hostAddress; #else VkDeviceAddress deviceAddress; void *hostAddress; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct AccelerationStructureBuildGeometryInfoKHR { using NativeType = VkAccelerationStructureBuildGeometryInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *pGeometries_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *const *ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), type(type_), flags(flags_), mode(mode_), srcAccelerationStructure(srcAccelerationStructure_), dstAccelerationStructure(dstAccelerationStructure_), geometryCount(geometryCount_), pGeometries(pGeometries_), ppGeometries(ppGeometries_), scratchData(scratchData_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR(AccelerationStructureBuildGeometryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildGeometryInfoKHR(VkAccelerationStructureBuildGeometryInfoKHR const &rhs) 
VULKAN_HPP_NOEXCEPT : AccelerationStructureBuildGeometryInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &geometries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , type(type_) , flags(flags_) , mode(mode_) , srcAccelerationStructure(srcAccelerationStructure_) , dstAccelerationStructure(dstAccelerationStructure_) , geometryCount(static_cast(!geometries_.empty() ? geometries_.size() : pGeometries_.size())) , pGeometries(geometries_.data()) , ppGeometries(pGeometries_.data()) , scratchData(scratchData_) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT((!geometries_.empty() + !pGeometries_.empty()) <= 1); # else if(1 < (!geometries_.empty() + !pGeometries_.empty())) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureBuildGeometryInfoKHR &operator=(AccelerationStructureBuildGeometryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildGeometryInfoKHR &operator=(VkAccelerationStructureBuildGeometryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &setType(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setFlags(VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setMode(VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_) VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_) VULKAN_HPP_NOEXCEPT { dstAccelerationStructure = dstAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR &setGeometryCount(uint32_t geometryCount_) VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPGeometries(const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *pGeometries_) 
VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AccelerationStructureBuildGeometryInfoKHR &setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &geometries_) VULKAN_HPP_NOEXCEPT { geometryCount = static_cast(geometries_.size()); pGeometries = geometries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries(const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *const *ppGeometries_) VULKAN_HPP_NOEXCEPT { ppGeometries = ppGeometries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AccelerationStructureBuildGeometryInfoKHR & setPGeometries(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pGeometries_) VULKAN_HPP_NOEXCEPT { geometryCount = static_cast(pGeometries_.size()); ppGeometries = pGeometries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setScratchData(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &scratchData_) VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; uint32_t geometryCount = {}; const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *pGeometries = {}; const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *const *ppGeometries = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR) == sizeof(VkAccelerationStructureBuildGeometryInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureBuildGeometryInfoKHR; }; struct AccelerationStructureBuildRangeInfoKHR { using NativeType = VkAccelerationStructureBuildRangeInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) 
VULKAN_HPP_NOEXCEPT : primitiveCount(primitiveCount_), primitiveOffset(primitiveOffset_), firstVertex(firstVertex_), transformOffset(transformOffset_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR(AccelerationStructureBuildRangeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildRangeInfoKHR(VkAccelerationStructureBuildRangeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureBuildRangeInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureBuildRangeInfoKHR &operator=(AccelerationStructureBuildRangeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildRangeInfoKHR &operator=(VkAccelerationStructureBuildRangeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &setPrimitiveCount(uint32_t primitiveCount_) VULKAN_HPP_NOEXCEPT { primitiveCount = primitiveCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &setPrimitiveOffset(uint32_t primitiveOffset_) VULKAN_HPP_NOEXCEPT { primitiveOffset = primitiveOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &setFirstVertex(uint32_t firstVertex_) VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &setTransformOffset(uint32_t transformOffset_) VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(primitiveCount, primitiveOffset, firstVertex, transformOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureBuildRangeInfoKHR const &) const = default; #else bool operator==(AccelerationStructureBuildRangeInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (primitiveCount == rhs.primitiveCount) && (primitiveOffset == rhs.primitiveOffset) && (firstVertex == rhs.firstVertex) && (transformOffset == rhs.transformOffset); # endif } bool operator!=(AccelerationStructureBuildRangeInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t primitiveCount = {}; uint32_t primitiveOffset = {}; uint32_t firstVertex = {}; uint32_t transformOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR) == sizeof(VkAccelerationStructureBuildRangeInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!"); struct AccelerationStructureBuildSizesInfoKHR { using NativeType = VkAccelerationStructureBuildSizesInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eAccelerationStructureBuildSizesInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), accelerationStructureSize(accelerationStructureSize_), updateScratchSize(updateScratchSize_), buildScratchSize(buildScratchSize_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(AccelerationStructureBuildSizesInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildSizesInfoKHR(VkAccelerationStructureBuildSizesInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureBuildSizesInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureBuildSizesInfoKHR &operator=(AccelerationStructureBuildSizesInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildSizesInfoKHR &operator=(VkAccelerationStructureBuildSizesInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setAccelerationStructureSize(VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_) VULKAN_HPP_NOEXCEPT { accelerationStructureSize = accelerationStructureSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setUpdateScratchSize(VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_) VULKAN_HPP_NOEXCEPT { updateScratchSize = updateScratchSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setBuildScratchSize(VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_) VULKAN_HPP_NOEXCEPT { buildScratchSize = buildScratchSize_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureBuildSizesInfoKHR const &) const = default; #else bool operator==(AccelerationStructureBuildSizesInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (accelerationStructureSize == rhs.accelerationStructureSize) && (updateScratchSize == rhs.updateScratchSize) && (buildScratchSize == rhs.buildScratchSize); # endif } bool operator!=(AccelerationStructureBuildSizesInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {}; VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {}; 
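    // Note (illustrative, not generated documentation): this structure is normally filled by
    // vkGetAccelerationStructureBuildSizesKHR (Device::getAccelerationStructureBuildSizesKHR in
    // this binding); accelerationStructureSize sizes the backing buffer, while buildScratchSize
    // and updateScratchSize size the scratch buffer for build and update operations.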
VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR) == sizeof(VkAccelerationStructureBuildSizesInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureBuildSizesInfoKHR; }; struct AccelerationStructureCreateInfoKHR { using NativeType = VkAccelerationStructureCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), createFlags(createFlags_), buffer(buffer_), offset(offset_), size(size_), type(type_), deviceAddress(deviceAddress_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(AccelerationStructureCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoKHR(VkAccelerationStructureCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureCreateInfoKHR &operator=(AccelerationStructureCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoKHR &operator=(VkAccelerationStructureCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setCreateFlags(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_) VULKAN_HPP_NOEXCEPT { createFlags = createFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &setType(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &setDeviceAddress(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureCreateInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, createFlags, buffer, offset, size, type, deviceAddress); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureCreateInfoKHR const &) const = default; #else bool operator==(AccelerationStructureCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (createFlags == rhs.createFlags) && (buffer == rhs.buffer) && (offset == rhs.offset) && (size == rhs.size) && (type == rhs.type) && (deviceAddress == rhs.deviceAddress); # endif } bool operator!=(AccelerationStructureCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR) == sizeof(VkAccelerationStructureCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureCreateInfoKHR; }; struct GeometryTrianglesNV { using NativeType = VkGeometryTrianglesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vertexData(vertexData_), vertexOffset(vertexOffset_), vertexCount(vertexCount_), vertexStride(vertexStride_), vertexFormat(vertexFormat_), indexData(indexData_), indexOffset(indexOffset_), indexCount(indexCount_), indexType(indexType_), transformData(transformData_), transformOffset(transformOffset_) { } VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(GeometryTrianglesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryTrianglesNV(VkGeometryTrianglesNV const &rhs) VULKAN_HPP_NOEXCEPT : 
GeometryTrianglesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryTrianglesNV &operator=(GeometryTrianglesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryTrianglesNV &operator=(VkGeometryTrianglesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setVertexData(VULKAN_HPP_NAMESPACE::Buffer vertexData_) VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setVertexOffset(VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_) VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setVertexCount(uint32_t vertexCount_) VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setVertexStride(VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_) VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setVertexFormat(VULKAN_HPP_NAMESPACE::Format vertexFormat_) VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setIndexData(VULKAN_HPP_NAMESPACE::Buffer indexData_) VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setIndexOffset(VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_) VULKAN_HPP_NOEXCEPT { indexOffset = indexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setIndexCount(uint32_t indexCount_) VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setIndexType(VULKAN_HPP_NAMESPACE::IndexType indexType_) VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setTransformData(VULKAN_HPP_NAMESPACE::Buffer transformData_) VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &setTransformOffset(VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_) VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vertexData, vertexOffset, vertexCount, vertexStride, vertexFormat, indexData, indexOffset, indexCount, indexType, transformData, transformOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GeometryTrianglesNV const &) const = default; #else bool operator==(GeometryTrianglesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vertexData == rhs.vertexData) && (vertexOffset == rhs.vertexOffset) && (vertexCount == rhs.vertexCount) && (vertexStride == rhs.vertexStride) && (vertexFormat == rhs.vertexFormat) && (indexData == rhs.indexData) && (indexOffset == rhs.indexOffset) && (indexCount == rhs.indexCount) && (indexType == 
rhs.indexType) && (transformData == rhs.transformData) && (transformOffset == rhs.transformOffset); # endif } bool operator!=(GeometryTrianglesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; uint32_t vertexCount = {}; VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::Buffer indexData = {}; VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; uint32_t indexCount = {}; VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; VULKAN_HPP_NAMESPACE::Buffer transformData = {}; VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV) == sizeof(VkGeometryTrianglesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GeometryTrianglesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = GeometryTrianglesNV; }; struct GeometryAABBNV { using NativeType = VkGeometryAABBNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), aabbData(aabbData_), numAABBs(numAABBs_), stride(stride_), offset(offset_) { } VULKAN_HPP_CONSTEXPR GeometryAABBNV(GeometryAABBNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryAABBNV(VkGeometryAABBNV const &rhs) VULKAN_HPP_NOEXCEPT : GeometryAABBNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryAABBNV &operator=(GeometryAABBNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryAABBNV &operator=(VkGeometryAABBNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV &setAabbData(VULKAN_HPP_NAMESPACE::Buffer aabbData_) VULKAN_HPP_NOEXCEPT { aabbData = aabbData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV &setNumAABBs(uint32_t numAABBs_) VULKAN_HPP_NOEXCEPT { numAABBs = numAABBs_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV &setStride(uint32_t stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, aabbData, numAABBs, stride, 
offset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GeometryAABBNV const &) const = default; #else bool operator==(GeometryAABBNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (aabbData == rhs.aabbData) && (numAABBs == rhs.numAABBs) && (stride == rhs.stride) && (offset == rhs.offset); # endif } bool operator!=(GeometryAABBNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; uint32_t numAABBs = {}; uint32_t stride = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GeometryAABBNV) == sizeof(VkGeometryAABBNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GeometryAABBNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = GeometryAABBNV; }; struct GeometryDataNV { using NativeType = VkGeometryDataNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT : triangles(triangles_), aabbs(aabbs_) { } VULKAN_HPP_CONSTEXPR GeometryDataNV(GeometryDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryDataNV(VkGeometryDataNV const &rhs) VULKAN_HPP_NOEXCEPT : GeometryDataNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryDataNV &operator=(GeometryDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryDataNV &operator=(VkGeometryDataNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GeometryDataNV &setTriangles(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const &triangles_) VULKAN_HPP_NOEXCEPT { triangles = triangles_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryDataNV &setAabbs(VULKAN_HPP_NAMESPACE::GeometryAABBNV const &aabbs_) VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(triangles, aabbs); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GeometryDataNV const &) const = default; #else bool operator==(GeometryDataNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (triangles == rhs.triangles) && (aabbs == rhs.aabbs); # endif } bool operator!=(GeometryDataNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {}; VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GeometryDataNV) == sizeof(VkGeometryDataNV), "struct and wrapper have different size!"); 
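  // Illustrative usage sketch (not part of the generated API): GeometryDataNV bundles both the
  // triangle and AABB descriptions for the NV ray-tracing path; only the member matching the
  // enclosing GeometryNV::geometryType is consumed, e.g.:
  //
  //   VULKAN_HPP_NAMESPACE::GeometryDataNV geometryData =
  //     VULKAN_HPP_NAMESPACE::GeometryDataNV{}.setTriangles( trianglesNV );  // trianglesNV: a GeometryTrianglesNV built elsewhere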
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GeometryDataNV is not nothrow_move_constructible!"); struct GeometryNV { using NativeType = VkGeometryNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), geometryType(geometryType_), geometry(geometry_), flags(flags_) { } VULKAN_HPP_CONSTEXPR GeometryNV(GeometryNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryNV(VkGeometryNV const &rhs) VULKAN_HPP_NOEXCEPT : GeometryNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryNV &operator=(GeometryNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeometryNV &operator=(VkGeometryNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GeometryNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV &setGeometryType(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_) VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV &setGeometry(VULKAN_HPP_NAMESPACE::GeometryDataNV const &geometry_) VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV &setFlags(VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, geometryType, geometry, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GeometryNV const &) const = default; #else bool operator==(GeometryNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (geometryType == rhs.geometryType) && (geometry == rhs.geometry) && (flags == rhs.flags); # endif } bool operator!=(GeometryNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GeometryNV) == sizeof(VkGeometryNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GeometryNV is not nothrow_move_constructible!"); template<> struct 
CppType { using Type = GeometryNV; }; struct AccelerationStructureInfoNV { using NativeType = VkAccelerationStructureInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV *pGeometries_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), type(type_), flags(flags_), instanceCount(instanceCount_), geometryCount(geometryCount_), pGeometries(pGeometries_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(AccelerationStructureInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInfoNV(VkAccelerationStructureInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &geometries_, const void *pNext_ = nullptr) : pNext(pNext_) , type(type_) , flags(flags_) , instanceCount(instanceCount_) , geometryCount(static_cast(geometries_.size())) , pGeometries(geometries_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureInfoNV &operator=(AccelerationStructureInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInfoNV &operator=(VkAccelerationStructureInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &setType(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &setFlags(VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &setInstanceCount(uint32_t instanceCount_) VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &setGeometryCount(uint32_t geometryCount_) VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &setPGeometries(const VULKAN_HPP_NAMESPACE::GeometryNV *pGeometries_) VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AccelerationStructureInfoNV & setGeometries(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &geometries_) VULKAN_HPP_NOEXCEPT { geometryCount = static_cast(geometries_.size()); pGeometries = geometries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); 
} #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, type, flags, instanceCount, geometryCount, pGeometries); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureInfoNV const &) const = default; #else bool operator==(AccelerationStructureInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (type == rhs.type) && (flags == rhs.flags) && (instanceCount == rhs.instanceCount) && (geometryCount == rhs.geometryCount) && (pGeometries == rhs.pGeometries); # endif } bool operator!=(AccelerationStructureInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; uint32_t instanceCount = {}; uint32_t geometryCount = {}; const VULKAN_HPP_NAMESPACE::GeometryNV *pGeometries = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV) == sizeof(VkAccelerationStructureInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureInfoNV; }; struct AccelerationStructureCreateInfoNV { using NativeType = VkAccelerationStructureCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), compactedSize(compactedSize_), info(info_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(AccelerationStructureCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoNV(VkAccelerationStructureCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureCreateInfoNV &operator=(AccelerationStructureCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoNV &operator=(VkAccelerationStructureCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &setCompactedSize(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_) VULKAN_HPP_NOEXCEPT { compactedSize = compactedSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &setInfo(VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const &info_) VULKAN_HPP_NOEXCEPT { info = info_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit 
operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, compactedSize, info); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureCreateInfoNV const &) const = default; #else bool operator==(AccelerationStructureCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (compactedSize == rhs.compactedSize) && (info == rhs.info); # endif } bool operator!=(AccelerationStructureCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV) == sizeof(VkAccelerationStructureCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureCreateInfoNV; }; struct AccelerationStructureDeviceAddressInfoKHR { using NativeType = VkAccelerationStructureDeviceAddressInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), accelerationStructure(accelerationStructure_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(AccelerationStructureDeviceAddressInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureDeviceAddressInfoKHR(VkAccelerationStructureDeviceAddressInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureDeviceAddressInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureDeviceAddressInfoKHR &operator=(AccelerationStructureDeviceAddressInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureDeviceAddressInfoKHR &operator=(VkAccelerationStructureDeviceAddressInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator 
VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, accelerationStructure); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureDeviceAddressInfoKHR const &) const = default; #else bool operator==(AccelerationStructureDeviceAddressInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (accelerationStructure == rhs.accelerationStructure); # endif } bool operator!=(AccelerationStructureDeviceAddressInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR) == sizeof(VkAccelerationStructureDeviceAddressInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureDeviceAddressInfoKHR; }; struct AccelerationStructureGeometryMotionTrianglesDataNV { using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vertexData(vertexData_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV(AccelerationStructureGeometryMotionTrianglesDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryMotionTrianglesDataNV(VkAccelerationStructureGeometryMotionTrianglesDataNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryMotionTrianglesDataNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryMotionTrianglesDataNV & operator=(AccelerationStructureGeometryMotionTrianglesDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryMotionTrianglesDataNV &operator=(VkAccelerationStructureGeometryMotionTrianglesDataNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setVertexData(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &vertexData_) VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; 
return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vertexData); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV) == sizeof(VkAccelerationStructureGeometryMotionTrianglesDataNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureGeometryMotionTrianglesDataNV; }; struct TransformMatrixKHR { using NativeType = VkTransformMatrixKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array, 3> const &matrix_ = {}) VULKAN_HPP_NOEXCEPT : matrix(matrix_) {} VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(TransformMatrixKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; TransformMatrixKHR(VkTransformMatrixKHR const &rhs) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ TransformMatrixKHR &operator=(TransformMatrixKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; TransformMatrixKHR &operator=(VkTransformMatrixKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR &setMatrix(std::array, 3> matrix_) VULKAN_HPP_NOEXCEPT { matrix = matrix_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(matrix); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(TransformMatrixKHR const &) const = default; #else bool operator==(TransformMatrixKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (matrix == rhs.matrix); # endif } bool operator!=(TransformMatrixKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ArrayWrapper2D matrix = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::TransformMatrixKHR) == sizeof(VkTransformMatrixKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "TransformMatrixKHR is not nothrow_move_constructible!"); using TransformMatrixNV = 
TransformMatrixKHR; struct AccelerationStructureInstanceKHR { using NativeType = VkAccelerationStructureInstanceKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT : transform(transform_), instanceCustomIndex(instanceCustomIndex_), mask(mask_), instanceShaderBindingTableRecordOffset(instanceShaderBindingTableRecordOffset_), flags(flags_), accelerationStructureReference(accelerationStructureReference_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(AccelerationStructureInstanceKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInstanceKHR(VkAccelerationStructureInstanceKHR const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureInstanceKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureInstanceKHR &operator=(AccelerationStructureInstanceKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInstanceKHR &operator=(VkAccelerationStructureInstanceKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &setTransform(VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &transform_) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &setInstanceCustomIndex(uint32_t instanceCustomIndex_) VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &setMask(uint32_t mask_) VULKAN_HPP_NOEXCEPT { mask = mask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset(uint32_t instanceShaderBindingTableRecordOffset_) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } AccelerationStructureInstanceKHR &setFlags(VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast(&flags_); return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &setAccelerationStructureReference(uint64_t accelerationStructureReference_) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureInstanceKHR const &) const = default; #else bool operator==(AccelerationStructureInstanceKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (transform == rhs.transform) && 
(instanceCustomIndex == rhs.instanceCustomIndex) && (mask == rhs.mask) && (instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset) && (flags == rhs.flags) && (accelerationStructureReference == rhs.accelerationStructureReference); # endif } bool operator!=(AccelerationStructureInstanceKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; uint32_t instanceCustomIndex : 24; uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR) == sizeof(VkAccelerationStructureInstanceKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureInstanceKHR is not nothrow_move_constructible!"); using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; struct AccelerationStructureMatrixMotionInstanceNV { using NativeType = VkAccelerationStructureMatrixMotionInstanceNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT : transformT0(transformT0_), transformT1(transformT1_), instanceCustomIndex(instanceCustomIndex_), mask(mask_), instanceShaderBindingTableRecordOffset(instanceShaderBindingTableRecordOffset_), flags(flags_), accelerationStructureReference(accelerationStructureReference_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV(AccelerationStructureMatrixMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMatrixMotionInstanceNV(VkAccelerationStructureMatrixMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureMatrixMotionInstanceNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMatrixMotionInstanceNV &operator=(AccelerationStructureMatrixMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMatrixMotionInstanceNV &operator=(VkAccelerationStructureMatrixMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT0(VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &transformT0_) VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT1(VULKAN_HPP_NAMESPACE::TransformMatrixKHR const &transformT1_) VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &setInstanceCustomIndex(uint32_t instanceCustomIndex_) VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV &setMask(uint32_t mask_) VULKAN_HPP_NOEXCEPT { mask = mask_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceShaderBindingTableRecordOffset(uint32_t instanceShaderBindingTableRecordOffset_) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } AccelerationStructureMatrixMotionInstanceNV &setFlags(VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast(&flags_); return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setAccelerationStructureReference(uint64_t accelerationStructureReference_) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureMatrixMotionInstanceNV const &) const = default; #else bool operator==(AccelerationStructureMatrixMotionInstanceNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (transformT0 == rhs.transformT0) && (transformT1 == rhs.transformT1) && (instanceCustomIndex == rhs.instanceCustomIndex) && (mask == rhs.mask) && (instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset) && (flags == rhs.flags) && (accelerationStructureReference == rhs.accelerationStructureReference); # endif } bool operator!=(AccelerationStructureMatrixMotionInstanceNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {}; VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {}; uint32_t instanceCustomIndex : 24; uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV) == sizeof(VkAccelerationStructureMatrixMotionInstanceNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!"); struct AccelerationStructureMemoryRequirementsInfoNV { using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, const void 
*pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), type(type_), accelerationStructure(accelerationStructure_)
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(AccelerationStructureMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureMemoryRequirementsInfoNV(VkAccelerationStructureMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMemoryRequirementsInfoNV(*reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureMemoryRequirementsInfoNV &operator=(AccelerationStructureMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureMemoryRequirementsInfoNV &operator=(VkAccelerationStructureMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &setType(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &setAccelerationStructure(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>(this); }

    explicit operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV const &,
               VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, type, accelerationStructure);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(AccelerationStructureMemoryRequirementsInfoNV const &) const = default;
#else
    bool operator==(AccelerationStructureMemoryRequirementsInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (type == rhs.type) && (accelerationStructure == rhs.accelerationStructure);
# endif
    }

    bool operator!=(AccelerationStructureMemoryRequirementsInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV) == sizeof(VkAccelerationStructureMemoryRequirementsInfoNV), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
  {
    using Type = AccelerationStructureMemoryRequirementsInfoNV;
  };
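  // Editorial usage sketch (not generated from the Vulkan XML registry): fills
  // AccelerationStructureMemoryRequirementsInfoNV the way it is typically passed to
  // vkGetAccelerationStructureMemoryRequirementsNV when querying build-scratch memory. The
  // function name is a placeholder, eBuildScratch is chosen purely for the example, the handle is
  // supplied by the caller, and the sketch assumes constructors and setters are enabled.
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) && !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
  inline AccelerationStructureMemoryRequirementsInfoNV
    exampleScratchMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_) VULKAN_HPP_NOEXCEPT
  {
    return AccelerationStructureMemoryRequirementsInfoNV()
      .setType(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch)
      .setAccelerationStructure(accelerationStructure_);
  }
#endif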
struct AccelerationStructureMotionInfoNV { using NativeType = VkAccelerationStructureMotionInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV(uint32_t maxInstances_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxInstances(maxInstances_), flags(flags_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV(AccelerationStructureMotionInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMotionInfoNV(VkAccelerationStructureMotionInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureMotionInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMotionInfoNV &operator=(AccelerationStructureMotionInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMotionInfoNV &operator=(VkAccelerationStructureMotionInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV &setMaxInstances(uint32_t maxInstances_) VULKAN_HPP_NOEXCEPT { maxInstances = maxInstances_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setFlags(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxInstances, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureMotionInfoNV const &) const = default; #else bool operator==(AccelerationStructureMotionInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxInstances == rhs.maxInstances) && (flags == rhs.flags); # endif } bool operator!=(AccelerationStructureMotionInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV; const void *pNext = {}; uint32_t maxInstances = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV) == sizeof(VkAccelerationStructureMotionInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureMotionInfoNV; }; struct SRTDataNV { using 
NativeType = VkSRTDataNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SRTDataNV(float sx_ = {}, float a_ = {}, float b_ = {}, float pvx_ = {}, float sy_ = {}, float c_ = {}, float pvy_ = {}, float sz_ = {}, float pvz_ = {}, float qx_ = {}, float qy_ = {}, float qz_ = {}, float qw_ = {}, float tx_ = {}, float ty_ = {}, float tz_ = {}) VULKAN_HPP_NOEXCEPT : sx(sx_), a(a_), b(b_), pvx(pvx_), sy(sy_), c(c_), pvy(pvy_), sz(sz_), pvz(pvz_), qx(qx_), qy(qy_), qz(qz_), qw(qw_), tx(tx_), ty(ty_), tz(tz_) { } VULKAN_HPP_CONSTEXPR SRTDataNV(SRTDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; SRTDataNV(VkSRTDataNV const &rhs) VULKAN_HPP_NOEXCEPT : SRTDataNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SRTDataNV &operator=(SRTDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; SRTDataNV &operator=(VkSRTDataNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setSx(float sx_) VULKAN_HPP_NOEXCEPT { sx = sx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setA(float a_) VULKAN_HPP_NOEXCEPT { a = a_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setB(float b_) VULKAN_HPP_NOEXCEPT { b = b_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setPvx(float pvx_) VULKAN_HPP_NOEXCEPT { pvx = pvx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setSy(float sy_) VULKAN_HPP_NOEXCEPT { sy = sy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setC(float c_) VULKAN_HPP_NOEXCEPT { c = c_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setPvy(float pvy_) VULKAN_HPP_NOEXCEPT { pvy = pvy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setSz(float sz_) VULKAN_HPP_NOEXCEPT { sz = sz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setPvz(float pvz_) VULKAN_HPP_NOEXCEPT { pvz = pvz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setQx(float qx_) VULKAN_HPP_NOEXCEPT { qx = qx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setQy(float qy_) VULKAN_HPP_NOEXCEPT { qy = qy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setQz(float qz_) VULKAN_HPP_NOEXCEPT { qz = qz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setQw(float qw_) VULKAN_HPP_NOEXCEPT { qw = qw_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setTx(float tx_) VULKAN_HPP_NOEXCEPT { tx = tx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setTy(float ty_) VULKAN_HPP_NOEXCEPT { ty = ty_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV &setTz(float tz_) VULKAN_HPP_NOEXCEPT { tz = tz_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SRTDataNV const &) const = default; #else bool operator==(SRTDataNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sx == rhs.sx) && (a == rhs.a) && (b == rhs.b) && (pvx == rhs.pvx) && (sy == rhs.sy) && (c == rhs.c) && (pvy == rhs.pvy) && (sz == rhs.sz) && (pvz == rhs.pvz) && (qx == rhs.qx) && (qy == rhs.qy) && (qz == rhs.qz) && (qw == rhs.qw) && (tx == 
rhs.tx) && (ty == rhs.ty) && (tz == rhs.tz); # endif } bool operator!=(SRTDataNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: float sx = {}; float a = {}; float b = {}; float pvx = {}; float sy = {}; float c = {}; float pvy = {}; float sz = {}; float pvz = {}; float qx = {}; float qy = {}; float qz = {}; float qw = {}; float tx = {}; float ty = {}; float tz = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SRTDataNV) == sizeof(VkSRTDataNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SRTDataNV is not nothrow_move_constructible!"); struct AccelerationStructureSRTMotionInstanceNV { using NativeType = VkAccelerationStructureSRTMotionInstanceNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV(VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT : transformT0(transformT0_), transformT1(transformT1_), instanceCustomIndex(instanceCustomIndex_), mask(mask_), instanceShaderBindingTableRecordOffset(instanceShaderBindingTableRecordOffset_), flags(flags_), accelerationStructureReference(accelerationStructureReference_) { } VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV(AccelerationStructureSRTMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureSRTMotionInstanceNV(VkAccelerationStructureSRTMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureSRTMotionInstanceNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureSRTMotionInstanceNV &operator=(AccelerationStructureSRTMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureSRTMotionInstanceNV &operator=(VkAccelerationStructureSRTMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT0(VULKAN_HPP_NAMESPACE::SRTDataNV const &transformT0_) VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT1(VULKAN_HPP_NAMESPACE::SRTDataNV const &transformT1_) VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &setInstanceCustomIndex(uint32_t instanceCustomIndex_) VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV &setMask(uint32_t mask_) VULKAN_HPP_NOEXCEPT { mask = mask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceShaderBindingTableRecordOffset(uint32_t instanceShaderBindingTableRecordOffset_) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } AccelerationStructureSRTMotionInstanceNV &setFlags(VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast(&flags_); return *this; } 
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setAccelerationStructureReference(uint64_t accelerationStructureReference_) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AccelerationStructureSRTMotionInstanceNV const &) const = default; #else bool operator==(AccelerationStructureSRTMotionInstanceNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (transformT0 == rhs.transformT0) && (transformT1 == rhs.transformT1) && (instanceCustomIndex == rhs.instanceCustomIndex) && (mask == rhs.mask) && (instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset) && (flags == rhs.flags) && (accelerationStructureReference == rhs.accelerationStructureReference); # endif } bool operator!=(AccelerationStructureSRTMotionInstanceNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {}; VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {}; uint32_t instanceCustomIndex : 24; uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV) == sizeof(VkAccelerationStructureSRTMotionInstanceNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!"); union AccelerationStructureMotionInstanceDataNV { using NativeType = VkAccelerationStructureMotionInstanceDataNV; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV(VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {}) : staticInstance(staticInstance_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_) : matrixMotionInstance(matrixMotionInstance_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV(VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_) : srtMotionInstance(srtMotionInstance_) { } #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance(VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const &staticInstance_) VULKAN_HPP_NOEXCEPT { staticInstance = staticInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & 
setMatrixMotionInstance(VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const &matrixMotionInstance_) VULKAN_HPP_NOEXCEPT { matrixMotionInstance = matrixMotionInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance(VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const &srtMotionInstance_) VULKAN_HPP_NOEXCEPT { srtMotionInstance = srtMotionInstance_; return *this; } #endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkAccelerationStructureMotionInstanceDataNV const &() const { return *reinterpret_cast(this); } operator VkAccelerationStructureMotionInstanceDataNV &() { return *reinterpret_cast(this); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance; VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance; #else VkAccelerationStructureInstanceKHR staticInstance; VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct AccelerationStructureMotionInstanceNV { using NativeType = VkAccelerationStructureMotionInstanceNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {}) VULKAN_HPP_NOEXCEPT : type(type_), flags(flags_), data(data_) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV(AccelerationStructureMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMotionInstanceNV(VkAccelerationStructureMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT : AccelerationStructureMotionInstanceNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMotionInstanceNV &operator=(AccelerationStructureMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMotionInstanceNV &operator=(VkAccelerationStructureMotionInstanceNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setType(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setFlags(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setData(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const &data_) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie(type, flags, data);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV) == sizeof(VkAccelerationStructureMotionInstanceNV), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value, "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!");

  struct AccelerationStructureVersionInfoKHR
  {
    using NativeType = VkAccelerationStructureVersionInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR(const uint8_t *pVersionData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), pVersionData(pVersionData_)
    {
    }

    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR(AccelerationStructureVersionInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureVersionInfoKHR(VkAccelerationStructureVersionInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureVersionInfoKHR(*reinterpret_cast<AccelerationStructureVersionInfoKHR const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureVersionInfoKHR &operator=(AccelerationStructureVersionInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureVersionInfoKHR &operator=(VkAccelerationStructureVersionInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR &setPVersionData(const uint8_t *pVersionData_) VULKAN_HPP_NOEXCEPT { pVersionData = pVersionData_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>(this); }

    explicit operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, pVersionData);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(AccelerationStructureVersionInfoKHR const &) const = default;
#else
    bool operator==(AccelerationStructureVersionInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (pVersionData == rhs.pVersionData);
# endif
    }

    bool operator!=(AccelerationStructureVersionInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR;
    const void *pNext = {};
    const uint8_t *pVersionData = {};
  };
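  // Editorial usage sketch (not generated from the Vulkan XML registry):
  // AccelerationStructureVersionInfoKHR only carries a pointer to the 2 * VK_UUID_SIZE bytes of
  // version data at the start of a serialized acceleration structure; it does not copy or own
  // that memory, so the bytes must stay valid while the structure is consumed (for example by
  // vkGetDeviceAccelerationStructureCompatibilityKHR). The function name and pointer parameter
  // are placeholders for illustration, assuming constructors and setters are enabled.
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) && !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
  inline AccelerationStructureVersionInfoKHR exampleVersionInfo(const uint8_t *pSerializedVersionData) VULKAN_HPP_NOEXCEPT
  {
    return AccelerationStructureVersionInfoKHR().setPVersionData(pSerializedVersionData);
  }
#endif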
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR) == sizeof(VkAccelerationStructureVersionInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AccelerationStructureVersionInfoKHR; }; struct AcquireNextImageInfoKHR { using NativeType = VkAcquireNextImageInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchain(swapchain_), timeout(timeout_), semaphore(semaphore_), fence(fence_), deviceMask(deviceMask_) { } VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(AcquireNextImageInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AcquireNextImageInfoKHR(VkAcquireNextImageInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AcquireNextImageInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AcquireNextImageInfoKHR &operator=(AcquireNextImageInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AcquireNextImageInfoKHR &operator=(VkAcquireNextImageInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &setSwapchain(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_) VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &setTimeout(uint64_t timeout_) VULKAN_HPP_NOEXCEPT { timeout = timeout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &setFence(VULKAN_HPP_NAMESPACE::Fence fence_) VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &setDeviceMask(uint32_t deviceMask_) VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, swapchain, timeout, semaphore, fence, deviceMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AcquireNextImageInfoKHR const &) const = default; #else bool operator==(AcquireNextImageInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchain == rhs.swapchain) && 
(timeout == rhs.timeout) && (semaphore == rhs.semaphore) && (fence == rhs.fence) && (deviceMask == rhs.deviceMask); # endif } bool operator!=(AcquireNextImageInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; uint64_t timeout = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::Fence fence = {}; uint32_t deviceMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR) == sizeof(VkAcquireNextImageInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AcquireNextImageInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AcquireNextImageInfoKHR; }; struct AcquireProfilingLockInfoKHR { using NativeType = VkAcquireProfilingLockInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), timeout(timeout_) { } VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(AcquireProfilingLockInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AcquireProfilingLockInfoKHR(VkAcquireProfilingLockInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AcquireProfilingLockInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AcquireProfilingLockInfoKHR &operator=(AcquireProfilingLockInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AcquireProfilingLockInfoKHR &operator=(VkAcquireProfilingLockInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR &setTimeout(uint64_t timeout_) VULKAN_HPP_NOEXCEPT { timeout = timeout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, timeout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AcquireProfilingLockInfoKHR const &) const = default; #else bool operator==(AcquireProfilingLockInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (timeout == rhs.timeout); # endif } bool operator!=(AcquireProfilingLockInfoKHR const &rhs) const 
VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; uint64_t timeout = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR) == sizeof(VkAcquireProfilingLockInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AcquireProfilingLockInfoKHR; }; struct AllocationCallbacks { using NativeType = VkAllocationCallbacks; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AllocationCallbacks(void *pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT : pUserData(pUserData_), pfnAllocation(pfnAllocation_), pfnReallocation(pfnReallocation_), pfnFree(pfnFree_), pfnInternalAllocation(pfnInternalAllocation_), pfnInternalFree(pfnInternalFree_) { } VULKAN_HPP_CONSTEXPR AllocationCallbacks(AllocationCallbacks const &rhs) VULKAN_HPP_NOEXCEPT = default; AllocationCallbacks(VkAllocationCallbacks const &rhs) VULKAN_HPP_NOEXCEPT : AllocationCallbacks(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AllocationCallbacks &operator=(AllocationCallbacks const &rhs) VULKAN_HPP_NOEXCEPT = default; AllocationCallbacks &operator=(VkAllocationCallbacks const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &setPUserData(void *pUserData_) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &setPfnAllocation(PFN_vkAllocationFunction pfnAllocation_) VULKAN_HPP_NOEXCEPT { pfnAllocation = pfnAllocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &setPfnReallocation(PFN_vkReallocationFunction pfnReallocation_) VULKAN_HPP_NOEXCEPT { pfnReallocation = pfnReallocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &setPfnFree(PFN_vkFreeFunction pfnFree_) VULKAN_HPP_NOEXCEPT { pfnFree = pfnFree_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &setPfnInternalAllocation(PFN_vkInternalAllocationNotification pfnInternalAllocation_) VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &setPfnInternalFree(PFN_vkInternalFreeNotification pfnInternalFree_) VULKAN_HPP_NOEXCEPT { pfnInternalFree = pfnInternalFree_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(AllocationCallbacks const &) const = default; #else bool operator==(AllocationCallbacks const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (pUserData == rhs.pUserData) && (pfnAllocation == rhs.pfnAllocation) && (pfnReallocation == rhs.pfnReallocation) && (pfnFree == rhs.pfnFree) && (pfnInternalAllocation == rhs.pfnInternalAllocation) && (pfnInternalFree == rhs.pfnInternalFree); # endif } bool operator!=(AllocationCallbacks const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: void *pUserData = {}; PFN_vkAllocationFunction pfnAllocation = {}; PFN_vkReallocationFunction pfnReallocation = {}; PFN_vkFreeFunction pfnFree = {}; PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; PFN_vkInternalFreeNotification pfnInternalFree = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AllocationCallbacks) == sizeof(VkAllocationCallbacks), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AllocationCallbacks is not nothrow_move_constructible!"); struct ComponentMapping { using NativeType = VkComponentMapping; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT : r(r_), g(g_), b(b_), a(a_) { } VULKAN_HPP_CONSTEXPR ComponentMapping(ComponentMapping const &rhs) VULKAN_HPP_NOEXCEPT = default; ComponentMapping(VkComponentMapping const &rhs) VULKAN_HPP_NOEXCEPT : ComponentMapping(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ComponentMapping &operator=(ComponentMapping const &rhs) VULKAN_HPP_NOEXCEPT = default; ComponentMapping &operator=(VkComponentMapping const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ComponentMapping &setR(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_) VULKAN_HPP_NOEXCEPT { r = r_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping &setG(VULKAN_HPP_NAMESPACE::ComponentSwizzle g_) VULKAN_HPP_NOEXCEPT { g = g_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping &setB(VULKAN_HPP_NAMESPACE::ComponentSwizzle b_) VULKAN_HPP_NOEXCEPT { b = b_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping &setA(VULKAN_HPP_NAMESPACE::ComponentSwizzle a_) VULKAN_HPP_NOEXCEPT { a = a_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(r, g, b, a); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ComponentMapping const &) const = default; #else bool operator==(ComponentMapping const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return 
this->reflect() == rhs.reflect(); # else return (r == rhs.r) && (g == rhs.g) && (b == rhs.b) && (a == rhs.a); # endif } bool operator!=(ComponentMapping const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ComponentMapping) == sizeof(VkComponentMapping), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ComponentMapping is not nothrow_move_constructible!"); #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct AndroidHardwareBufferFormatProperties2ANDROID { using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), format(format_), externalFormat(externalFormat_), formatFeatures(formatFeatures_), samplerYcbcrConversionComponents(samplerYcbcrConversionComponents_), suggestedYcbcrModel(suggestedYcbcrModel_), suggestedYcbcrRange(suggestedYcbcrRange_), suggestedXChromaOffset(suggestedXChromaOffset_), suggestedYChromaOffset(suggestedYChromaOffset_) { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID(AndroidHardwareBufferFormatProperties2ANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatProperties2ANDROID(VkAndroidHardwareBufferFormatProperties2ANDROID const &rhs) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferFormatProperties2ANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferFormatProperties2ANDROID &operator=(AndroidHardwareBufferFormatProperties2ANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatProperties2ANDROID &operator=(VkAndroidHardwareBufferFormatProperties2ANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AndroidHardwareBufferFormatProperties2ANDROID const &) const = default; # else bool operator==(AndroidHardwareBufferFormatProperties2ANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (format == rhs.format) && (externalFormat == rhs.externalFormat) && (formatFeatures == rhs.formatFeatures) && (samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents) && (suggestedYcbcrModel == rhs.suggestedYcbcrModel) && (suggestedYcbcrRange == rhs.suggestedYcbcrRange) && (suggestedXChromaOffset == rhs.suggestedXChromaOffset) && (suggestedYChromaOffset == rhs.suggestedYChromaOffset); # endif } bool operator!=(AndroidHardwareBufferFormatProperties2ANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; void *pNext = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; uint64_t externalFormat = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {}; VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID) == sizeof(VkAndroidHardwareBufferFormatProperties2ANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = AndroidHardwareBufferFormatProperties2ANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct AndroidHardwareBufferFormatPropertiesANDROID { using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = 
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), format(format_), externalFormat(externalFormat_), formatFeatures(formatFeatures_), samplerYcbcrConversionComponents(samplerYcbcrConversionComponents_), suggestedYcbcrModel(suggestedYcbcrModel_), suggestedYcbcrRange(suggestedYcbcrRange_), suggestedXChromaOffset(suggestedXChromaOffset_), suggestedYChromaOffset(suggestedYChromaOffset_) { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(AndroidHardwareBufferFormatPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatPropertiesANDROID(VkAndroidHardwareBufferFormatPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferFormatPropertiesANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferFormatPropertiesANDROID &operator=(AndroidHardwareBufferFormatPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatPropertiesANDROID &operator=(VkAndroidHardwareBufferFormatPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AndroidHardwareBufferFormatPropertiesANDROID const &) const = default; # else bool operator==(AndroidHardwareBufferFormatPropertiesANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (format == rhs.format) && (externalFormat == rhs.externalFormat) && (formatFeatures == rhs.formatFeatures) && (samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents) && (suggestedYcbcrModel == rhs.suggestedYcbcrModel) && (suggestedYcbcrRange == rhs.suggestedYcbcrRange) && (suggestedXChromaOffset == rhs.suggestedXChromaOffset) && (suggestedYChromaOffset == rhs.suggestedYChromaOffset); # endif } bool operator!=(AndroidHardwareBufferFormatPropertiesANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; void *pNext = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; uint64_t externalFormat = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = 
VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID) == sizeof(VkAndroidHardwareBufferFormatPropertiesANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = AndroidHardwareBufferFormatPropertiesANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct AndroidHardwareBufferPropertiesANDROID { using NativeType = VkAndroidHardwareBufferPropertiesANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), allocationSize(allocationSize_), memoryTypeBits(memoryTypeBits_) { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(AndroidHardwareBufferPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferPropertiesANDROID(VkAndroidHardwareBufferPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferPropertiesANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferPropertiesANDROID &operator=(AndroidHardwareBufferPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferPropertiesANDROID &operator=(VkAndroidHardwareBufferPropertiesANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, allocationSize, memoryTypeBits); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AndroidHardwareBufferPropertiesANDROID const &) const = default; # else bool operator==(AndroidHardwareBufferPropertiesANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (allocationSize == rhs.allocationSize) && (memoryTypeBits == rhs.memoryTypeBits); # endif } bool operator!=(AndroidHardwareBufferPropertiesANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; 
uint32_t memoryTypeBits = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID) == sizeof(VkAndroidHardwareBufferPropertiesANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = AndroidHardwareBufferPropertiesANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct AndroidHardwareBufferUsageANDROID { using NativeType = VkAndroidHardwareBufferUsageANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), androidHardwareBufferUsage(androidHardwareBufferUsage_) { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(AndroidHardwareBufferUsageANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferUsageANDROID(VkAndroidHardwareBufferUsageANDROID const &rhs) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferUsageANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferUsageANDROID &operator=(AndroidHardwareBufferUsageANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferUsageANDROID &operator=(VkAndroidHardwareBufferUsageANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, androidHardwareBufferUsage); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AndroidHardwareBufferUsageANDROID const &) const = default; # else bool operator==(AndroidHardwareBufferUsageANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (androidHardwareBufferUsage == rhs.androidHardwareBufferUsage); # endif } bool operator!=(AndroidHardwareBufferUsageANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; void *pNext = {}; uint64_t androidHardwareBufferUsage = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID) == sizeof(VkAndroidHardwareBufferUsageANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = AndroidHardwareBufferUsageANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined(VK_USE_PLATFORM_ANDROID_KHR) 
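// NOTE (editorial sketch, not generated from the registry): a minimal usage example for the
// AndroidSurfaceCreateInfoKHR wrapper defined below, assuming the default VULKAN_HPP_NAMESPACE
// alias "vk", a valid ANativeWindow *nativeWindow obtained from the app's native activity, and a
// vk::Instance "instance" created with the VK_KHR_android_surface extension enabled:
//
//   vk::AndroidSurfaceCreateInfoKHR surfaceCreateInfo = vk::AndroidSurfaceCreateInfoKHR{}.setWindow( nativeWindow );
//   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceCreateInfo );
//
// The fluent setters simply mirror the members of VkAndroidSurfaceCreateInfoKHR; the flags member
// can normally be left value-initialized.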
struct AndroidSurfaceCreateInfoKHR { using NativeType = VkAndroidSurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow *window_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), window(window_) { } VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(AndroidSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidSurfaceCreateInfoKHR(VkAndroidSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : AndroidSurfaceCreateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidSurfaceCreateInfoKHR &operator=(AndroidSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; AndroidSurfaceCreateInfoKHR &operator=(VkAndroidSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR &setWindow(struct ANativeWindow *window_) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, window); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AndroidSurfaceCreateInfoKHR const &) const = default; # else bool operator==(AndroidSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (window == rhs.window); # endif } bool operator!=(AndroidSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; struct ANativeWindow *window = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR) == sizeof(VkAndroidSurfaceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = AndroidSurfaceCreateInfoKHR; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ApplicationInfo { using NativeType = VkApplicationInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eApplicationInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ApplicationInfo(const char *pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char *pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pApplicationName(pApplicationName_), applicationVersion(applicationVersion_), pEngineName(pEngineName_), engineVersion(engineVersion_), apiVersion(apiVersion_) { } VULKAN_HPP_CONSTEXPR ApplicationInfo(ApplicationInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ApplicationInfo(VkApplicationInfo const &rhs) VULKAN_HPP_NOEXCEPT : ApplicationInfo(*reinterpret_cast<ApplicationInfo const *>(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ApplicationInfo &operator=(ApplicationInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ApplicationInfo &operator=(VkApplicationInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ApplicationInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo &setPApplicationName(const char *pApplicationName_) VULKAN_HPP_NOEXCEPT { pApplicationName = pApplicationName_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo &setApplicationVersion(uint32_t applicationVersion_) VULKAN_HPP_NOEXCEPT { applicationVersion = applicationVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo &setPEngineName(const char *pEngineName_) VULKAN_HPP_NOEXCEPT { pEngineName = pEngineName_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo &setEngineVersion(uint32_t engineVersion_) VULKAN_HPP_NOEXCEPT { engineVersion = engineVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo &setApiVersion(uint32_t apiVersion_) VULKAN_HPP_NOEXCEPT { apiVersion = apiVersion_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkApplicationInfo *>(this); } explicit operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkApplicationInfo *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &, const char * const &, uint32_t const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(ApplicationInfo const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(pApplicationName != rhs.pApplicationName) if(auto cmp = strcmp(pApplicationName, rhs.pApplicationName); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0) return cmp; if(pEngineName != rhs.pEngineName) if(auto cmp = strcmp(pEngineName, rhs.pEngineName); cmp != 0) return (cmp < 0) ?
std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0) return cmp; if(auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==(ApplicationInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && ((pApplicationName == rhs.pApplicationName) || (strcmp(pApplicationName, rhs.pApplicationName) == 0)) && (applicationVersion == rhs.applicationVersion) && ((pEngineName == rhs.pEngineName) || (strcmp(pEngineName, rhs.pEngineName) == 0)) && (engineVersion == rhs.engineVersion) && (apiVersion == rhs.apiVersion); } bool operator!=(ApplicationInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo; const void *pNext = {}; const char *pApplicationName = {}; uint32_t applicationVersion = {}; const char *pEngineName = {}; uint32_t engineVersion = {}; uint32_t apiVersion = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ApplicationInfo) == sizeof(VkApplicationInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "ApplicationInfo is not nothrow_move_constructible!"); template<> struct CppType<StructureType, StructureType::eApplicationInfo> { using Type = ApplicationInfo; }; struct AttachmentDescription { using NativeType = VkAttachmentDescription; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT : flags(flags_), format(format_), samples(samples_), loadOp(loadOp_), storeOp(storeOp_), stencilLoadOp(stencilLoadOp_), stencilStoreOp(stencilStoreOp_), initialLayout(initialLayout_), finalLayout(finalLayout_) { } VULKAN_HPP_CONSTEXPR AttachmentDescription(AttachmentDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription(VkAttachmentDescription const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentDescription(*reinterpret_cast<AttachmentDescription const *>(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentDescription &operator=(AttachmentDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription &operator=(VkAttachmentDescription const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setFlags(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setFormat(VULKAN_HPP_NAMESPACE::Format
format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setSamples(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_) VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setLoadOp(VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_) VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setStoreOp(VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_) VULKAN_HPP_NOEXCEPT { storeOp = storeOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setStencilLoadOp(VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_) VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setStencilStoreOp(VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_) VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setInitialLayout(VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_) VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &setFinalLayout(VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_) VULKAN_HPP_NOEXCEPT { finalLayout = finalLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkAttachmentDescription *>(this); } explicit operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkAttachmentDescription *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &, VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentDescription const &) const = default; #else bool operator==(AttachmentDescription const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (flags == rhs.flags) && (format == rhs.format) && (samples == rhs.samples) && (loadOp == rhs.loadOp) && (storeOp == rhs.storeOp) && (stencilLoadOp == rhs.stencilLoadOp) && (stencilStoreOp == rhs.stencilStoreOp) && (initialLayout == rhs.initialLayout) && (finalLayout == rhs.finalLayout); # endif } bool operator!=(AttachmentDescription const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentDescription) == sizeof(VkAttachmentDescription), "struct and wrapper have different size!");
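// NOTE (editorial sketch, not generated from the registry): the setters above return *this, so an
// AttachmentDescription can be populated as a fluent chain instead of through the constructor.
// Assuming the default VULKAN_HPP_NAMESPACE alias "vk" and a swapchain image format of
// eB8G8R8A8Unorm (an assumption for illustration only):
//
//   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription{}
//                                                 .setFormat( vk::Format::eB8G8R8A8Unorm )
//                                                 .setSamples( vk::SampleCountFlagBits::e1 )
//                                                 .setLoadOp( vk::AttachmentLoadOp::eClear )
//                                                 .setStoreOp( vk::AttachmentStoreOp::eStore )
//                                                 .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
//                                                 .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
//                                                 .setInitialLayout( vk::ImageLayout::eUndefined )
//                                                 .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );
//
// The chain is equivalent to passing the same values to the constructor defined above.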
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentDescription is not nothrow_move_constructible!"); struct AttachmentDescription2 { using NativeType = VkAttachmentDescription2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), format(format_), samples(samples_), loadOp(loadOp_), storeOp(storeOp_), stencilLoadOp(stencilLoadOp_), stencilStoreOp(stencilStoreOp_), initialLayout(initialLayout_), finalLayout(finalLayout_) { } VULKAN_HPP_CONSTEXPR AttachmentDescription2(AttachmentDescription2 const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription2(VkAttachmentDescription2 const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentDescription2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentDescription2 &operator=(AttachmentDescription2 const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription2 &operator=(VkAttachmentDescription2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setFlags(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setSamples(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_) VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setLoadOp(VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_) VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setStoreOp(VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_) VULKAN_HPP_NOEXCEPT { storeOp = storeOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setStencilLoadOp(VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_) VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setStencilStoreOp(VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_) VULKAN_HPP_NOEXCEPT { stencilStoreOp = 
stencilStoreOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setInitialLayout(VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_) VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &setFinalLayout(VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_) VULKAN_HPP_NOEXCEPT { finalLayout = finalLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentDescription2 const &) const = default; #else bool operator==(AttachmentDescription2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (format == rhs.format) && (samples == rhs.samples) && (loadOp == rhs.loadOp) && (storeOp == rhs.storeOp) && (stencilLoadOp == rhs.stencilLoadOp) && (stencilStoreOp == rhs.stencilStoreOp) && (initialLayout == rhs.initialLayout) && (finalLayout == rhs.finalLayout); # endif } bool operator!=(AttachmentDescription2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentDescription2) == sizeof(VkAttachmentDescription2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentDescription2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = AttachmentDescription2; }; using AttachmentDescription2KHR = AttachmentDescription2; struct AttachmentDescriptionStencilLayout { using NativeType = VkAttachmentDescriptionStencilLayout; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout 
stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stencilInitialLayout(stencilInitialLayout_), stencilFinalLayout(stencilFinalLayout_) { } VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(AttachmentDescriptionStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentDescriptionStencilLayout(VkAttachmentDescriptionStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentDescriptionStencilLayout(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentDescriptionStencilLayout &operator=(AttachmentDescriptionStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentDescriptionStencilLayout &operator=(VkAttachmentDescriptionStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilInitialLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_) VULKAN_HPP_NOEXCEPT { stencilInitialLayout = stencilInitialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilFinalLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_) VULKAN_HPP_NOEXCEPT { stencilFinalLayout = stencilFinalLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stencilInitialLayout, stencilFinalLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentDescriptionStencilLayout const &) const = default; #else bool operator==(AttachmentDescriptionStencilLayout const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stencilInitialLayout == rhs.stencilInitialLayout) && (stencilFinalLayout == rhs.stencilFinalLayout); # endif } bool operator!=(AttachmentDescriptionStencilLayout const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout) == sizeof(VkAttachmentDescriptionStencilLayout), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!"); template<> struct CppType { using Type = AttachmentDescriptionStencilLayout; }; using 
AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; struct AttachmentReference { using NativeType = VkAttachmentReference; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT : attachment(attachment_), layout(layout_) { } VULKAN_HPP_CONSTEXPR AttachmentReference(AttachmentReference const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentReference(VkAttachmentReference const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentReference(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentReference &operator=(AttachmentReference const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentReference &operator=(VkAttachmentReference const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentReference &setAttachment(uint32_t attachment_) VULKAN_HPP_NOEXCEPT { attachment = attachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference &setLayout(VULKAN_HPP_NAMESPACE::ImageLayout layout_) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(attachment, layout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentReference const &) const = default; #else bool operator==(AttachmentReference const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (attachment == rhs.attachment) && (layout == rhs.layout); # endif } bool operator!=(AttachmentReference const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t attachment = {}; VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentReference) == sizeof(VkAttachmentReference), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentReference is not nothrow_move_constructible!"); struct AttachmentReference2 { using NativeType = VkAttachmentReference2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), attachment(attachment_), layout(layout_), aspectMask(aspectMask_) { } VULKAN_HPP_CONSTEXPR AttachmentReference2(AttachmentReference2 const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentReference2(VkAttachmentReference2 const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentReference2(*reinterpret_cast(&rhs)) { } #endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentReference2 &operator=(AttachmentReference2 const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentReference2 &operator=(VkAttachmentReference2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &setAttachment(uint32_t attachment_) VULKAN_HPP_NOEXCEPT { attachment = attachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &setLayout(VULKAN_HPP_NAMESPACE::ImageLayout layout_) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &setAspectMask(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, attachment, layout, aspectMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentReference2 const &) const = default; #else bool operator==(AttachmentReference2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (attachment == rhs.attachment) && (layout == rhs.layout) && (aspectMask == rhs.aspectMask); # endif } bool operator!=(AttachmentReference2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; const void *pNext = {}; uint32_t attachment = {}; VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentReference2) == sizeof(VkAttachmentReference2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentReference2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = AttachmentReference2; }; using AttachmentReference2KHR = AttachmentReference2; struct AttachmentReferenceStencilLayout { using NativeType = VkAttachmentReferenceStencilLayout; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stencilLayout(stencilLayout_) { } VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(AttachmentReferenceStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentReferenceStencilLayout(VkAttachmentReferenceStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT : 
AttachmentReferenceStencilLayout(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentReferenceStencilLayout &operator=(AttachmentReferenceStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentReferenceStencilLayout &operator=(VkAttachmentReferenceStencilLayout const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout &setStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_) VULKAN_HPP_NOEXCEPT { stencilLayout = stencilLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stencilLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentReferenceStencilLayout const &) const = default; #else bool operator==(AttachmentReferenceStencilLayout const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stencilLayout == rhs.stencilLayout); # endif } bool operator!=(AttachmentReferenceStencilLayout const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout; void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout) == sizeof(VkAttachmentReferenceStencilLayout), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentReferenceStencilLayout is not nothrow_move_constructible!"); template<> struct CppType { using Type = AttachmentReferenceStencilLayout; }; using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; struct AttachmentSampleCountInfoAMD { using NativeType = VkAttachmentSampleCountInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD(uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits *pColorAttachmentSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), colorAttachmentCount(colorAttachmentCount_), pColorAttachmentSamples(pColorAttachmentSamples_), depthStencilAttachmentSamples(depthStencilAttachmentSamples_) { } VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD(AttachmentSampleCountInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentSampleCountInfoAMD(VkAttachmentSampleCountInfoAMD 
const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentSampleCountInfoAMD(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AttachmentSampleCountInfoAMD( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachmentSamples_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void *pNext_ = nullptr) : pNext(pNext_) , colorAttachmentCount(static_cast(colorAttachmentSamples_.size())) , pColorAttachmentSamples(colorAttachmentSamples_.data()) , depthStencilAttachmentSamples(depthStencilAttachmentSamples_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentSampleCountInfoAMD &operator=(AttachmentSampleCountInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentSampleCountInfoAMD &operator=(VkAttachmentSampleCountInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD &setColorAttachmentCount(uint32_t colorAttachmentCount_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples(const VULKAN_HPP_NAMESPACE::SampleCountFlagBits *pColorAttachmentSamples_) VULKAN_HPP_NOEXCEPT { pColorAttachmentSamples = pColorAttachmentSamples_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) AttachmentSampleCountInfoAMD &setColorAttachmentSamples( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachmentSamples_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(colorAttachmentSamples_.size()); pColorAttachmentSamples = colorAttachmentSamples_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples(VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_) VULKAN_HPP_NOEXCEPT { depthStencilAttachmentSamples = depthStencilAttachmentSamples_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentSampleCountInfoAMD const &) const = default; #else bool operator==(AttachmentSampleCountInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (colorAttachmentCount == rhs.colorAttachmentCount) && (pColorAttachmentSamples == rhs.pColorAttachmentSamples) && (depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples); # endif } bool operator!=(AttachmentSampleCountInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD; const void *pNext = {}; 
uint32_t colorAttachmentCount = {}; const VULKAN_HPP_NAMESPACE::SampleCountFlagBits *pColorAttachmentSamples = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD) == sizeof(VkAttachmentSampleCountInfoAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = AttachmentSampleCountInfoAMD; }; using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; struct Extent2D { using NativeType = VkExtent2D; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT : width(width_), height(height_) { } VULKAN_HPP_CONSTEXPR Extent2D(Extent2D const &rhs) VULKAN_HPP_NOEXCEPT = default; Extent2D(VkExtent2D const &rhs) VULKAN_HPP_NOEXCEPT : Extent2D(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Extent2D &operator=(Extent2D const &rhs) VULKAN_HPP_NOEXCEPT = default; Extent2D &operator=(VkExtent2D const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Extent2D &setWidth(uint32_t width_) VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent2D &setHeight(uint32_t height_) VULKAN_HPP_NOEXCEPT { height = height_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExtent2D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(width, height); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Extent2D const &) const = default; #else bool operator==(Extent2D const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (width == rhs.width) && (height == rhs.height); # endif } bool operator!=(Extent2D const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t width = {}; uint32_t height = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Extent2D) == sizeof(VkExtent2D), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Extent2D is not nothrow_move_constructible!"); struct SampleLocationEXT { using NativeType = VkSampleLocationEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT : x(x_), y(y_) { } VULKAN_HPP_CONSTEXPR SampleLocationEXT(SampleLocationEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SampleLocationEXT(VkSampleLocationEXT const &rhs) VULKAN_HPP_NOEXCEPT : SampleLocationEXT(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SampleLocationEXT &operator=(SampleLocationEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SampleLocationEXT &operator=(VkSampleLocationEXT 
const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT &setX(float x_) VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT &setY(float y_) VULKAN_HPP_NOEXCEPT { y = y_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(x, y); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SampleLocationEXT const &) const = default; #else bool operator==(SampleLocationEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (x == rhs.x) && (y == rhs.y); # endif } bool operator!=(SampleLocationEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: float x = {}; float y = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SampleLocationEXT) == sizeof(VkSampleLocationEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SampleLocationEXT is not nothrow_move_constructible!"); struct SampleLocationsInfoEXT { using NativeType = VkSampleLocationsInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT *pSampleLocations_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), sampleLocationsPerPixel(sampleLocationsPerPixel_), sampleLocationGridSize(sampleLocationGridSize_), sampleLocationsCount(sampleLocationsCount_), pSampleLocations(pSampleLocations_) { } VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(SampleLocationsInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SampleLocationsInfoEXT(VkSampleLocationsInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : SampleLocationsInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &sampleLocations_, const void *pNext_ = nullptr) : pNext(pNext_) , sampleLocationsPerPixel(sampleLocationsPerPixel_) , sampleLocationGridSize(sampleLocationGridSize_) , sampleLocationsCount(static_cast(sampleLocations_.size())) , pSampleLocations(sampleLocations_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SampleLocationsInfoEXT &operator=(SampleLocationsInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SampleLocationsInfoEXT &operator=(VkSampleLocationsInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_) VULKAN_HPP_NOEXCEPT { sampleLocationsPerPixel = sampleLocationsPerPixel_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationGridSize(VULKAN_HPP_NAMESPACE::Extent2D const &sampleLocationGridSize_) VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &setSampleLocationsCount(uint32_t sampleLocationsCount_) VULKAN_HPP_NOEXCEPT { sampleLocationsCount = sampleLocationsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPSampleLocations(const VULKAN_HPP_NAMESPACE::SampleLocationEXT *pSampleLocations_) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SampleLocationsInfoEXT &setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &sampleLocations_) VULKAN_HPP_NOEXCEPT { sampleLocationsCount = static_cast(sampleLocations_.size()); pSampleLocations = sampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SampleLocationsInfoEXT const &) const = default; #else bool operator==(SampleLocationsInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (sampleLocationsPerPixel == rhs.sampleLocationsPerPixel) && (sampleLocationGridSize == rhs.sampleLocationGridSize) && (sampleLocationsCount == rhs.sampleLocationsCount) && (pSampleLocations == rhs.pSampleLocations); # endif } bool operator!=(SampleLocationsInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {}; uint32_t sampleLocationsCount = {}; const VULKAN_HPP_NAMESPACE::SampleLocationEXT *pSampleLocations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT) == sizeof(VkSampleLocationsInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SampleLocationsInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SampleLocationsInfoEXT; }; struct AttachmentSampleLocationsEXT { using NativeType = VkAttachmentSampleLocationsEXT; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT : attachmentIndex(attachmentIndex_), sampleLocationsInfo(sampleLocationsInfo_) { } VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(AttachmentSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentSampleLocationsEXT(VkAttachmentSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT : AttachmentSampleLocationsEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentSampleLocationsEXT &operator=(AttachmentSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; AttachmentSampleLocationsEXT &operator=(VkAttachmentSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT &setAttachmentIndex(uint32_t attachmentIndex_) VULKAN_HPP_NOEXCEPT { attachmentIndex = attachmentIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo(VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &sampleLocationsInfo_) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(attachmentIndex, sampleLocationsInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(AttachmentSampleLocationsEXT const &) const = default; #else bool operator==(AttachmentSampleLocationsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (attachmentIndex == rhs.attachmentIndex) && (sampleLocationsInfo == rhs.sampleLocationsInfo); # endif } bool operator!=(AttachmentSampleLocationsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t attachmentIndex = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT) == sizeof(VkAttachmentSampleLocationsEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "AttachmentSampleLocationsEXT is not nothrow_move_constructible!"); struct BaseInStructure { using NativeType = VkBaseInStructure; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, const struct VULKAN_HPP_NAMESPACE::BaseInStructure *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : sType(sType_), pNext(pNext_) { } BaseInStructure(BaseInStructure const &rhs) VULKAN_HPP_NOEXCEPT = default; BaseInStructure(VkBaseInStructure const &rhs) VULKAN_HPP_NOEXCEPT : BaseInStructure(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BaseInStructure &operator=(BaseInStructure const &rhs) VULKAN_HPP_NOEXCEPT = default; BaseInStructure 
&operator=(VkBaseInStructure const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BaseInStructure &setPNext(const struct VULKAN_HPP_NAMESPACE::BaseInStructure *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BaseInStructure const &) const = default; #else bool operator==(BaseInStructure const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext); # endif } bool operator!=(BaseInStructure const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; const struct VULKAN_HPP_NAMESPACE::BaseInStructure *pNext = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BaseInStructure) == sizeof(VkBaseInStructure), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BaseInStructure is not nothrow_move_constructible!"); struct BaseOutStructure { using NativeType = VkBaseOutStructure; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, struct VULKAN_HPP_NAMESPACE::BaseOutStructure *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : sType(sType_), pNext(pNext_) { } BaseOutStructure(BaseOutStructure const &rhs) VULKAN_HPP_NOEXCEPT = default; BaseOutStructure(VkBaseOutStructure const &rhs) VULKAN_HPP_NOEXCEPT : BaseOutStructure(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BaseOutStructure &operator=(BaseOutStructure const &rhs) VULKAN_HPP_NOEXCEPT = default; BaseOutStructure &operator=(VkBaseOutStructure const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BaseOutStructure &setPNext(struct VULKAN_HPP_NAMESPACE::BaseOutStructure *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BaseOutStructure const &) const = default; #else bool operator==(BaseOutStructure const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext); # endif } bool operator!=(BaseOutStructure const &rhs) const 
VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; struct VULKAN_HPP_NAMESPACE::BaseOutStructure *pNext = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BaseOutStructure) == sizeof(VkBaseOutStructure), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BaseOutStructure is not nothrow_move_constructible!"); struct BindAccelerationStructureMemoryInfoNV { using NativeType = VkBindAccelerationStructureMemoryInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t *pDeviceIndices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), accelerationStructure(accelerationStructure_), memory(memory_), memoryOffset(memoryOffset_), deviceIndexCount(deviceIndexCount_), pDeviceIndices(pDeviceIndices_) { } VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV(BindAccelerationStructureMemoryInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindAccelerationStructureMemoryInfoNV(VkBindAccelerationStructureMemoryInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : BindAccelerationStructureMemoryInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindAccelerationStructureMemoryInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceIndices_, const void *pNext_ = nullptr) : pNext(pNext_) , accelerationStructure(accelerationStructure_) , memory(memory_) , memoryOffset(memoryOffset_) , deviceIndexCount(static_cast(deviceIndices_.size())) , pDeviceIndices(deviceIndices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindAccelerationStructureMemoryInfoNV &operator=(BindAccelerationStructureMemoryInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindAccelerationStructureMemoryInfoNV &operator=(VkBindAccelerationStructureMemoryInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &setMemoryOffset(VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 
BindAccelerationStructureMemoryInfoNV &setDeviceIndexCount(uint32_t deviceIndexCount_) VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &setPDeviceIndices(const uint32_t *pDeviceIndices_) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindAccelerationStructureMemoryInfoNV & setDeviceIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceIndices_) VULKAN_HPP_NOEXCEPT { deviceIndexCount = static_cast(deviceIndices_.size()); pDeviceIndices = deviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindAccelerationStructureMemoryInfoNV const &) const = default; #else bool operator==(BindAccelerationStructureMemoryInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (accelerationStructure == rhs.accelerationStructure) && (memory == rhs.memory) && (memoryOffset == rhs.memoryOffset) && (deviceIndexCount == rhs.deviceIndexCount) && (pDeviceIndices == rhs.pDeviceIndices); # endif } bool operator!=(BindAccelerationStructureMemoryInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; uint32_t deviceIndexCount = {}; const uint32_t *pDeviceIndices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV) == sizeof(VkBindAccelerationStructureMemoryInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindAccelerationStructureMemoryInfoNV; }; struct BindBufferMemoryDeviceGroupInfo { using NativeType = VkBindBufferMemoryDeviceGroupInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t *pDeviceIndices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceIndexCount(deviceIndexCount_), pDeviceIndices(pDeviceIndices_) { } VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(BindBufferMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; 
BindBufferMemoryDeviceGroupInfo(VkBindBufferMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT : BindBufferMemoryDeviceGroupInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindBufferMemoryDeviceGroupInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceIndices_, const void *pNext_ = nullptr) : pNext(pNext_) , deviceIndexCount(static_cast(deviceIndices_.size())) , pDeviceIndices(deviceIndices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindBufferMemoryDeviceGroupInfo &operator=(BindBufferMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryDeviceGroupInfo &operator=(VkBindBufferMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo &setDeviceIndexCount(uint32_t deviceIndexCount_) VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo &setPDeviceIndices(const uint32_t *pDeviceIndices_) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindBufferMemoryDeviceGroupInfo & setDeviceIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceIndices_) VULKAN_HPP_NOEXCEPT { deviceIndexCount = static_cast(deviceIndices_.size()); pDeviceIndices = deviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceIndexCount, pDeviceIndices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindBufferMemoryDeviceGroupInfo const &) const = default; #else bool operator==(BindBufferMemoryDeviceGroupInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceIndexCount == rhs.deviceIndexCount) && (pDeviceIndices == rhs.pDeviceIndices); # endif } bool operator!=(BindBufferMemoryDeviceGroupInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; const void *pNext = {}; uint32_t deviceIndexCount = {}; const uint32_t *pDeviceIndices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo) == sizeof(VkBindBufferMemoryDeviceGroupInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindBufferMemoryDeviceGroupInfo; }; using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; struct 
BindBufferMemoryInfo { using NativeType = VkBindBufferMemoryInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), buffer(buffer_), memory(memory_), memoryOffset(memoryOffset_) { } VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(BindBufferMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryInfo(VkBindBufferMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT : BindBufferMemoryInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindBufferMemoryInfo &operator=(BindBufferMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryInfo &operator=(VkBindBufferMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &setMemoryOffset(VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, buffer, memory, memoryOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindBufferMemoryInfo const &) const = default; #else bool operator==(BindBufferMemoryInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (buffer == rhs.buffer) && (memory == rhs.memory) && (memoryOffset == rhs.memoryOffset); # endif } bool operator!=(BindBufferMemoryInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo) == sizeof(VkBindBufferMemoryInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindBufferMemoryInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindBufferMemoryInfo; }; using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; struct Offset2D { using NativeType 
= VkOffset2D; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT : x(x_), y(y_) { } VULKAN_HPP_CONSTEXPR Offset2D(Offset2D const &rhs) VULKAN_HPP_NOEXCEPT = default; Offset2D(VkOffset2D const &rhs) VULKAN_HPP_NOEXCEPT : Offset2D(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Offset2D &operator=(Offset2D const &rhs) VULKAN_HPP_NOEXCEPT = default; Offset2D &operator=(VkOffset2D const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Offset2D &setX(int32_t x_) VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset2D &setY(int32_t y_) VULKAN_HPP_NOEXCEPT { y = y_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkOffset2D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(x, y); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Offset2D const &) const = default; #else bool operator==(Offset2D const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (x == rhs.x) && (y == rhs.y); # endif } bool operator!=(Offset2D const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: int32_t x = {}; int32_t y = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Offset2D) == sizeof(VkOffset2D), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Offset2D is not nothrow_move_constructible!"); struct Rect2D { using NativeType = VkRect2D; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT : offset(offset_), extent(extent_) { } VULKAN_HPP_CONSTEXPR Rect2D(Rect2D const &rhs) VULKAN_HPP_NOEXCEPT = default; Rect2D(VkRect2D const &rhs) VULKAN_HPP_NOEXCEPT : Rect2D(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Rect2D &operator=(Rect2D const &rhs) VULKAN_HPP_NOEXCEPT = default; Rect2D &operator=(VkRect2D const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Rect2D &setOffset(VULKAN_HPP_NAMESPACE::Offset2D const &offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 Rect2D &setExtent(VULKAN_HPP_NAMESPACE::Extent2D const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRect2D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(offset, extent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Rect2D const &) const = default; #else bool operator==(Rect2D const &rhs) const 
VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (offset == rhs.offset) && (extent == rhs.extent); # endif } bool operator!=(Rect2D const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Offset2D offset = {}; VULKAN_HPP_NAMESPACE::Extent2D extent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Rect2D) == sizeof(VkRect2D), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Rect2D is not nothrow_move_constructible!"); struct BindImageMemoryDeviceGroupInfo { using NativeType = VkBindImageMemoryDeviceGroupInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t *pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D *pSplitInstanceBindRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceIndexCount(deviceIndexCount_), pDeviceIndices(pDeviceIndices_), splitInstanceBindRegionCount(splitInstanceBindRegionCount_), pSplitInstanceBindRegions(pSplitInstanceBindRegions_) { } VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(BindImageMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryDeviceGroupInfo(VkBindImageMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT : BindImageMemoryDeviceGroupInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindImageMemoryDeviceGroupInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &splitInstanceBindRegions_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , deviceIndexCount(static_cast(deviceIndices_.size())) , pDeviceIndices(deviceIndices_.data()) , splitInstanceBindRegionCount(static_cast(splitInstanceBindRegions_.size())) , pSplitInstanceBindRegions(splitInstanceBindRegions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImageMemoryDeviceGroupInfo &operator=(BindImageMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryDeviceGroupInfo &operator=(VkBindImageMemoryDeviceGroupInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &setDeviceIndexCount(uint32_t deviceIndexCount_) VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &setPDeviceIndices(const uint32_t *pDeviceIndices_) VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindImageMemoryDeviceGroupInfo & setDeviceIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceIndices_) VULKAN_HPP_NOEXCEPT { deviceIndexCount = static_cast(deviceIndices_.size()); pDeviceIndices = deviceIndices_.data(); return *this; } # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &setSplitInstanceBindRegionCount(uint32_t splitInstanceBindRegionCount_) VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = splitInstanceBindRegionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions(const VULKAN_HPP_NAMESPACE::Rect2D *pSplitInstanceBindRegions_) VULKAN_HPP_NOEXCEPT { pSplitInstanceBindRegions = pSplitInstanceBindRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindImageMemoryDeviceGroupInfo &setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &splitInstanceBindRegions_) VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = static_cast(splitInstanceBindRegions_.size()); pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindImageMemoryDeviceGroupInfo const &) const = default; #else bool operator==(BindImageMemoryDeviceGroupInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceIndexCount == rhs.deviceIndexCount) && (pDeviceIndices == rhs.pDeviceIndices) && (splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount) && (pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions); # endif } bool operator!=(BindImageMemoryDeviceGroupInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; const void *pNext = {}; uint32_t deviceIndexCount = {}; const uint32_t *pDeviceIndices = {}; uint32_t splitInstanceBindRegionCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D *pSplitInstanceBindRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo) == sizeof(VkBindImageMemoryDeviceGroupInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindImageMemoryDeviceGroupInfo; }; using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; struct BindImageMemoryInfo { using NativeType = VkBindImageMemoryInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : 
pNext(pNext_), image(image_), memory(memory_), memoryOffset(memoryOffset_) { } VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(BindImageMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryInfo(VkBindImageMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImageMemoryInfo &operator=(BindImageMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryInfo &operator=(VkBindImageMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &setMemoryOffset(VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, image, memory, memoryOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindImageMemoryInfo const &) const = default; #else bool operator==(BindImageMemoryInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (image == rhs.image) && (memory == rhs.memory) && (memoryOffset == rhs.memoryOffset); # endif } bool operator!=(BindImageMemoryInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindImageMemoryInfo) == sizeof(VkBindImageMemoryInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindImageMemoryInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindImageMemoryInfo; }; using BindImageMemoryInfoKHR = BindImageMemoryInfo; struct BindImageMemorySwapchainInfoKHR { using NativeType = VkBindImageMemorySwapchainInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchain(swapchain_), imageIndex(imageIndex_) { } VULKAN_HPP_CONSTEXPR 
BindImageMemorySwapchainInfoKHR(BindImageMemorySwapchainInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImageMemorySwapchainInfoKHR(VkBindImageMemorySwapchainInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : BindImageMemorySwapchainInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImageMemorySwapchainInfoKHR &operator=(BindImageMemorySwapchainInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImageMemorySwapchainInfoKHR &operator=(VkBindImageMemorySwapchainInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR &setSwapchain(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_) VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR &setImageIndex(uint32_t imageIndex_) VULKAN_HPP_NOEXCEPT { imageIndex = imageIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, swapchain, imageIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindImageMemorySwapchainInfoKHR const &) const = default; #else bool operator==(BindImageMemorySwapchainInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchain == rhs.swapchain) && (imageIndex == rhs.imageIndex); # endif } bool operator!=(BindImageMemorySwapchainInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; uint32_t imageIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR) == sizeof(VkBindImageMemorySwapchainInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindImageMemorySwapchainInfoKHR; }; struct BindImagePlaneMemoryInfo { using NativeType = VkBindImagePlaneMemoryInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), planeAspect(planeAspect_) { } VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(BindImagePlaneMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImagePlaneMemoryInfo(VkBindImagePlaneMemoryInfo const &rhs) 
VULKAN_HPP_NOEXCEPT : BindImagePlaneMemoryInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImagePlaneMemoryInfo &operator=(BindImagePlaneMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindImagePlaneMemoryInfo &operator=(VkBindImagePlaneMemoryInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo &setPlaneAspect(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_) VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, planeAspect); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindImagePlaneMemoryInfo const &) const = default; #else bool operator==(BindImagePlaneMemoryInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (planeAspect == rhs.planeAspect); # endif } bool operator!=(BindImagePlaneMemoryInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo) == sizeof(VkBindImagePlaneMemoryInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindImagePlaneMemoryInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BindImagePlaneMemoryInfo; }; using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; struct BindIndexBufferIndirectCommandNV { using NativeType = VkBindIndexBufferIndirectCommandNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT : bufferAddress(bufferAddress_), size(size_), indexType(indexType_) { } VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(BindIndexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindIndexBufferIndirectCommandNV(VkBindIndexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT : BindIndexBufferIndirectCommandNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindIndexBufferIndirectCommandNV &operator=(BindIndexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindIndexBufferIndirectCommandNV &operator=(VkBindIndexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &setBufferAddress(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &setSize(uint32_t size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &setIndexType(VULKAN_HPP_NAMESPACE::IndexType indexType_) VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(bufferAddress, size, indexType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindIndexBufferIndirectCommandNV const &) const = default; #else bool operator==(BindIndexBufferIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (bufferAddress == rhs.bufferAddress) && (size == rhs.size) && (indexType == rhs.indexType); # endif } bool operator!=(BindIndexBufferIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; uint32_t size = {}; VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV) == sizeof(VkBindIndexBufferIndirectCommandNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!"); struct BindShaderGroupIndirectCommandNV { using NativeType = VkBindShaderGroupIndirectCommandNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT : groupIndex(groupIndex_) {} VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(BindShaderGroupIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindShaderGroupIndirectCommandNV(VkBindShaderGroupIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT : BindShaderGroupIndirectCommandNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindShaderGroupIndirectCommandNV &operator=(BindShaderGroupIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindShaderGroupIndirectCommandNV &operator=(VkBindShaderGroupIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV &setGroupIndex(uint32_t groupIndex_) VULKAN_HPP_NOEXCEPT { groupIndex = groupIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(groupIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindShaderGroupIndirectCommandNV const &) const = default; #else bool operator==(BindShaderGroupIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (groupIndex == rhs.groupIndex); # endif } bool operator!=(BindShaderGroupIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t groupIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV) == sizeof(VkBindShaderGroupIndirectCommandNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!"); struct SparseMemoryBind { using NativeType = VkSparseMemoryBind; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT : resourceOffset(resourceOffset_), size(size_), memory(memory_), memoryOffset(memoryOffset_), flags(flags_) { } VULKAN_HPP_CONSTEXPR SparseMemoryBind(SparseMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseMemoryBind(VkSparseMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT : SparseMemoryBind(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseMemoryBind &operator=(SparseMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseMemoryBind &operator=(VkSparseMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &setResourceOffset(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_) VULKAN_HPP_NOEXCEPT { resourceOffset = resourceOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &setMemoryOffset(VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind &setFlags(VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(resourceOffset, size, memory, memoryOffset, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SparseMemoryBind const &) const = default; #else bool 
operator==(SparseMemoryBind const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (resourceOffset == rhs.resourceOffset) && (size == rhs.size) && (memory == rhs.memory) && (memoryOffset == rhs.memoryOffset) && (flags == rhs.flags); # endif } bool operator!=(SparseMemoryBind const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SparseMemoryBind) == sizeof(VkSparseMemoryBind), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SparseMemoryBind is not nothrow_move_constructible!"); struct SparseBufferMemoryBindInfo { using NativeType = VkSparseBufferMemoryBindInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind *pBinds_ = {}) VULKAN_HPP_NOEXCEPT : buffer(buffer_), bindCount(bindCount_), pBinds(pBinds_) { } VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(SparseBufferMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseBufferMemoryBindInfo(VkSparseBufferMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT : SparseBufferMemoryBindInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &binds_) : buffer(buffer_) , bindCount(static_cast(binds_.size())) , pBinds(binds_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseBufferMemoryBindInfo &operator=(SparseBufferMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseBufferMemoryBindInfo &operator=(VkSparseBufferMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo &setBindCount(uint32_t bindCount_) VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo &setPBinds(const VULKAN_HPP_NAMESPACE::SparseMemoryBind *pBinds_) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SparseBufferMemoryBindInfo & setBinds(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &binds_) VULKAN_HPP_NOEXCEPT { bindCount = static_cast(binds_.size()); pBinds = binds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie(buffer, bindCount, pBinds); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SparseBufferMemoryBindInfo const &) const = default; #else bool operator==(SparseBufferMemoryBindInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (buffer == rhs.buffer) && (bindCount == rhs.bindCount) && (pBinds == rhs.pBinds); # endif } bool operator!=(SparseBufferMemoryBindInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Buffer buffer = {}; uint32_t bindCount = {}; const VULKAN_HPP_NAMESPACE::SparseMemoryBind *pBinds = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo) == sizeof(VkSparseBufferMemoryBindInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SparseBufferMemoryBindInfo is not nothrow_move_constructible!"); struct SparseImageOpaqueMemoryBindInfo { using NativeType = VkSparseImageOpaqueMemoryBindInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind *pBinds_ = {}) VULKAN_HPP_NOEXCEPT : image(image_), bindCount(bindCount_), pBinds(pBinds_) { } VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(SparseImageOpaqueMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageOpaqueMemoryBindInfo(VkSparseImageOpaqueMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT : SparseImageOpaqueMemoryBindInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &binds_) : image(image_) , bindCount(static_cast(binds_.size())) , pBinds(binds_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageOpaqueMemoryBindInfo &operator=(SparseImageOpaqueMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageOpaqueMemoryBindInfo &operator=(VkSparseImageOpaqueMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo &setBindCount(uint32_t bindCount_) VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo &setPBinds(const VULKAN_HPP_NAMESPACE::SparseMemoryBind *pBinds_) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SparseImageOpaqueMemoryBindInfo & setBinds(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &binds_) VULKAN_HPP_NOEXCEPT { bindCount = static_cast(binds_.size()); pBinds = binds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) 
# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(image, bindCount, pBinds); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SparseImageOpaqueMemoryBindInfo const &) const = default; #else bool operator==(SparseImageOpaqueMemoryBindInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (image == rhs.image) && (bindCount == rhs.bindCount) && (pBinds == rhs.pBinds); # endif } bool operator!=(SparseImageOpaqueMemoryBindInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Image image = {}; uint32_t bindCount = {}; const VULKAN_HPP_NAMESPACE::SparseMemoryBind *pBinds = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo) == sizeof(VkSparseImageOpaqueMemoryBindInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!"); struct ImageSubresource { using NativeType = VkImageSubresource; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT : aspectMask(aspectMask_), mipLevel(mipLevel_), arrayLayer(arrayLayer_) { } VULKAN_HPP_CONSTEXPR ImageSubresource(ImageSubresource const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSubresource(VkImageSubresource const &rhs) VULKAN_HPP_NOEXCEPT : ImageSubresource(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSubresource &operator=(ImageSubresource const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSubresource &operator=(VkImageSubresource const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageSubresource &setAspectMask(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource &setMipLevel(uint32_t mipLevel_) VULKAN_HPP_NOEXCEPT { mipLevel = mipLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource &setArrayLayer(uint32_t arrayLayer_) VULKAN_HPP_NOEXCEPT { arrayLayer = arrayLayer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(aspectMask, mipLevel, arrayLayer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageSubresource const &) const = default; #else bool operator==(ImageSubresource const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (aspectMask == rhs.aspectMask) && (mipLevel == rhs.mipLevel) && (arrayLayer == rhs.arrayLayer); # endif } bool operator!=(ImageSubresource const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; 
uint32_t mipLevel = {}; uint32_t arrayLayer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageSubresource) == sizeof(VkImageSubresource), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "ImageSubresource is not nothrow_move_constructible!"); struct Offset3D { using NativeType = VkOffset3D; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT : x(x_), y(y_), z(z_) { } VULKAN_HPP_CONSTEXPR Offset3D(Offset3D const &rhs) VULKAN_HPP_NOEXCEPT = default; Offset3D(VkOffset3D const &rhs) VULKAN_HPP_NOEXCEPT : Offset3D(*reinterpret_cast<Offset3D const *>(&rhs)) {} explicit Offset3D(Offset2D const &offset2D, int32_t z_ = {}) : x(offset2D.x) , y(offset2D.y) , z(z_) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Offset3D &operator=(Offset3D const &rhs) VULKAN_HPP_NOEXCEPT = default; Offset3D &operator=(VkOffset3D const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<Offset3D const *>(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Offset3D &setX(int32_t x_) VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset3D &setY(int32_t y_) VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset3D &setZ(int32_t z_) VULKAN_HPP_NOEXCEPT { z = z_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkOffset3D *>(this); } explicit operator VkOffset3D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkOffset3D *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<int32_t const &, int32_t const &, int32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(x, y, z); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Offset3D const &) const = default; #else bool operator==(Offset3D const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (x == rhs.x) && (y == rhs.y) && (z == rhs.z); # endif } bool operator!=(Offset3D const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: int32_t x = {}; int32_t y = {}; int32_t z = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Offset3D) == sizeof(VkOffset3D), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value, "Offset3D is not nothrow_move_constructible!"); struct Extent3D { using NativeType = VkExtent3D; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT : width(width_), height(height_), depth(depth_) { } VULKAN_HPP_CONSTEXPR Extent3D(Extent3D const &rhs) VULKAN_HPP_NOEXCEPT = default; Extent3D(VkExtent3D const &rhs) VULKAN_HPP_NOEXCEPT : Extent3D(*reinterpret_cast<Extent3D const *>(&rhs)) {} explicit Extent3D(Extent2D const &extent2D, uint32_t depth_ = {}) : width(extent2D.width) , height(extent2D.height) , depth(depth_) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Extent3D &operator=(Extent3D const &rhs) VULKAN_HPP_NOEXCEPT = default; Extent3D &operator=(VkExtent3D const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<Extent3D const *>(&rhs); return *this; } #if
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Extent3D &setWidth(uint32_t width_) VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent3D &setHeight(uint32_t height_) VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent3D &setDepth(uint32_t depth_) VULKAN_HPP_NOEXCEPT { depth = depth_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExtent3D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(width, height, depth); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Extent3D const &) const = default; #else bool operator==(Extent3D const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (width == rhs.width) && (height == rhs.height) && (depth == rhs.depth); # endif } bool operator!=(Extent3D const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t width = {}; uint32_t height = {}; uint32_t depth = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Extent3D) == sizeof(VkExtent3D), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Extent3D is not nothrow_move_constructible!"); struct SparseImageMemoryBind { using NativeType = VkSparseImageMemoryBind; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT : subresource(subresource_), offset(offset_), extent(extent_), memory(memory_), memoryOffset(memoryOffset_), flags(flags_) { } VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(SparseImageMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryBind(VkSparseImageMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT : SparseImageMemoryBind(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageMemoryBind &operator=(SparseImageMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryBind &operator=(VkSparseImageMemoryBind const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &setSubresource(VULKAN_HPP_NAMESPACE::ImageSubresource const &subresource_) VULKAN_HPP_NOEXCEPT { subresource = subresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &setOffset(VULKAN_HPP_NAMESPACE::Offset3D const &offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &setExtent(VULKAN_HPP_NAMESPACE::Extent3D const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 
SparseImageMemoryBind &setMemoryOffset(VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &setFlags(VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(subresource, offset, extent, memory, memoryOffset, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SparseImageMemoryBind const &) const = default; #else bool operator==(SparseImageMemoryBind const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (subresource == rhs.subresource) && (offset == rhs.offset) && (extent == rhs.extent) && (memory == rhs.memory) && (memoryOffset == rhs.memoryOffset) && (flags == rhs.flags); # endif } bool operator!=(SparseImageMemoryBind const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; VULKAN_HPP_NAMESPACE::Offset3D offset = {}; VULKAN_HPP_NAMESPACE::Extent3D extent = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SparseImageMemoryBind) == sizeof(VkSparseImageMemoryBind), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SparseImageMemoryBind is not nothrow_move_constructible!"); struct SparseImageMemoryBindInfo { using NativeType = VkSparseImageMemoryBindInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind *pBinds_ = {}) VULKAN_HPP_NOEXCEPT : image(image_), bindCount(bindCount_), pBinds(pBinds_) { } VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(SparseImageMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryBindInfo(VkSparseImageMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT : SparseImageMemoryBindInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &binds_) : image(image_) , bindCount(static_cast(binds_.size())) , pBinds(binds_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageMemoryBindInfo &operator=(SparseImageMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryBindInfo &operator=(VkSparseImageMemoryBindInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 
SparseImageMemoryBindInfo &setBindCount(uint32_t bindCount_) VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo &setPBinds(const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind *pBinds_) VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SparseImageMemoryBindInfo & setBinds(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &binds_) VULKAN_HPP_NOEXCEPT { bindCount = static_cast(binds_.size()); pBinds = binds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(image, bindCount, pBinds); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SparseImageMemoryBindInfo const &) const = default; #else bool operator==(SparseImageMemoryBindInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (image == rhs.image) && (bindCount == rhs.bindCount) && (pBinds == rhs.pBinds); # endif } bool operator!=(SparseImageMemoryBindInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Image image = {}; uint32_t bindCount = {}; const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind *pBinds = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo) == sizeof(VkSparseImageMemoryBindInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SparseImageMemoryBindInfo is not nothrow_move_constructible!"); struct BindSparseInfo { using NativeType = VkBindSparseInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo *pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo *pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo *pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore *pSignalSemaphores_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), waitSemaphoreCount(waitSemaphoreCount_), pWaitSemaphores(pWaitSemaphores_), bufferBindCount(bufferBindCount_), pBufferBinds(pBufferBinds_), imageOpaqueBindCount(imageOpaqueBindCount_), pImageOpaqueBinds(pImageOpaqueBinds_), imageBindCount(imageBindCount_), pImageBinds(pImageBinds_), signalSemaphoreCount(signalSemaphoreCount_), pSignalSemaphores(pSignalSemaphores_) { } VULKAN_HPP_CONSTEXPR BindSparseInfo(BindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindSparseInfo(VkBindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT : BindSparseInfo(*reinterpret_cast(&rhs)) {} # 
if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindSparseInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphores_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , waitSemaphoreCount(static_cast(waitSemaphores_.size())) , pWaitSemaphores(waitSemaphores_.data()) , bufferBindCount(static_cast(bufferBinds_.size())) , pBufferBinds(bufferBinds_.data()) , imageOpaqueBindCount(static_cast(imageOpaqueBinds_.size())) , pImageOpaqueBinds(imageOpaqueBinds_.data()) , imageBindCount(static_cast(imageBinds_.size())) , pImageBinds(imageBinds_.data()) , signalSemaphoreCount(static_cast(signalSemaphores_.size())) , pSignalSemaphores(signalSemaphores_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindSparseInfo &operator=(BindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BindSparseInfo &operator=(VkBindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setWaitSemaphoreCount(uint32_t waitSemaphoreCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setPWaitSemaphores(const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores_) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindSparseInfo & setWaitSemaphores(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphores_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast(waitSemaphores_.size()); pWaitSemaphores = waitSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setBufferBindCount(uint32_t bufferBindCount_) VULKAN_HPP_NOEXCEPT { bufferBindCount = bufferBindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setPBufferBinds(const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo *pBufferBinds_) VULKAN_HPP_NOEXCEPT { pBufferBinds = pBufferBinds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindSparseInfo &setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bufferBinds_) VULKAN_HPP_NOEXCEPT { bufferBindCount = static_cast(bufferBinds_.size()); pBufferBinds = bufferBinds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setImageOpaqueBindCount(uint32_t imageOpaqueBindCount_) VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = imageOpaqueBindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds(const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo *pImageOpaqueBinds_) VULKAN_HPP_NOEXCEPT { pImageOpaqueBinds = pImageOpaqueBinds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindSparseInfo &setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageOpaqueBinds_) VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = static_cast(imageOpaqueBinds_.size()); pImageOpaqueBinds = imageOpaqueBinds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setImageBindCount(uint32_t imageBindCount_) VULKAN_HPP_NOEXCEPT { imageBindCount = imageBindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setPImageBinds(const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo *pImageBinds_) VULKAN_HPP_NOEXCEPT { pImageBinds = pImageBinds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindSparseInfo &setImageBinds(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageBinds_) VULKAN_HPP_NOEXCEPT { imageBindCount = static_cast(imageBinds_.size()); pImageBinds = imageBinds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setSignalSemaphoreCount(uint32_t signalSemaphoreCount_) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo &setPSignalSemaphores(const VULKAN_HPP_NAMESPACE::Semaphore *pSignalSemaphores_) VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BindSparseInfo & setSignalSemaphores(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphores_) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = static_cast(signalSemaphores_.size()); pSignalSemaphores = signalSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, waitSemaphoreCount, pWaitSemaphores, bufferBindCount, pBufferBinds, imageOpaqueBindCount, pImageOpaqueBinds, imageBindCount, pImageBinds, signalSemaphoreCount, pSignalSemaphores); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindSparseInfo const &) const = default; #else bool operator==(BindSparseInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (waitSemaphoreCount == rhs.waitSemaphoreCount) && (pWaitSemaphores == rhs.pWaitSemaphores) && (bufferBindCount == rhs.bufferBindCount) && (pBufferBinds == rhs.pBufferBinds) && (imageOpaqueBindCount == rhs.imageOpaqueBindCount) && (pImageOpaqueBinds == rhs.pImageOpaqueBinds) && (imageBindCount == rhs.imageBindCount) && (pImageBinds == rhs.pImageBinds) && (signalSemaphoreCount == rhs.signalSemaphoreCount) && (pSignalSemaphores == rhs.pSignalSemaphores); # endif } bool operator!=(BindSparseInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; const void *pNext = {}; uint32_t waitSemaphoreCount = {}; const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores = {}; uint32_t bufferBindCount = {}; const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo *pBufferBinds = {}; uint32_t imageOpaqueBindCount = {}; const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo *pImageOpaqueBinds = {}; uint32_t imageBindCount = {}; const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo *pImageBinds = {}; uint32_t signalSemaphoreCount = {}; const VULKAN_HPP_NAMESPACE::Semaphore *pSignalSemaphores = {}; }; 
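// --- Editorial note (not generated code): usage sketch for BindSparseInfo -----------------------
// BindSparseInfo above mirrors VkBindSparseInfo, the submission structure for vkQueueBindSparse.
// A minimal sketch of combining the chained setters with the enhanced-mode ArrayProxy overloads
// follows, assuming the default `vk` namespace and that `queue`, `sparseBuffer`, `memory`, and
// `signalSemaphore` are valid handles created elsewhere; sizes and offsets are placeholders.
//
//   vk::SparseMemoryBind bind = vk::SparseMemoryBind()
//                                 .setResourceOffset( 0 )
//                                 .setSize( 65536 )
//                                 .setMemory( memory )
//                                 .setMemoryOffset( 0 );
//   vk::SparseBufferMemoryBindInfo bufferBind( sparseBuffer, 1, &bind );
//   vk::BindSparseInfo bindSparseInfo = vk::BindSparseInfo()
//                                         .setBufferBinds( bufferBind )          // ArrayProxy setter (enhanced mode)
//                                         .setSignalSemaphores( signalSemaphore );
//   queue.bindSparse( bindSparseInfo, vk::Fence() );  // vkQueueBindSparse, no fence; handle the
//                                                     // returned vk::Result if exceptions are disabled
// -------------------------------------------------------------------------------------------------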
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindSparseInfo) == sizeof(VkBindSparseInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "BindSparseInfo is not nothrow_move_constructible!"); template<> struct CppType<StructureType, StructureType::eBindSparseInfo> { using Type = BindSparseInfo; }; struct BindVertexBufferIndirectCommandNV { using NativeType = VkBindVertexBufferIndirectCommandNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT : bufferAddress(bufferAddress_), size(size_), stride(stride_) { } VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(BindVertexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindVertexBufferIndirectCommandNV(VkBindVertexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT : BindVertexBufferIndirectCommandNV(*reinterpret_cast<BindVertexBufferIndirectCommandNV const *>(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindVertexBufferIndirectCommandNV &operator=(BindVertexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default; BindVertexBufferIndirectCommandNV &operator=(VkBindVertexBufferIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV &setBufferAddress(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV &setSize(uint32_t size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV &setStride(uint32_t stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV *>(this); } explicit operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, uint32_t const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(bufferAddress, size, stride); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BindVertexBufferIndirectCommandNV const &) const = default; #else bool operator==(BindVertexBufferIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (bufferAddress == rhs.bufferAddress) && (size == rhs.size) && (stride == rhs.stride); # endif } bool operator!=(BindVertexBufferIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; uint32_t size = {}; uint32_t stride = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV) == sizeof(VkBindVertexBufferIndirectCommandNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!"); struct ImageSubresourceLayers { using NativeType =
VkImageSubresourceLayers; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT : aspectMask(aspectMask_), mipLevel(mipLevel_), baseArrayLayer(baseArrayLayer_), layerCount(layerCount_) { } VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(ImageSubresourceLayers const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSubresourceLayers(VkImageSubresourceLayers const &rhs) VULKAN_HPP_NOEXCEPT : ImageSubresourceLayers(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSubresourceLayers &operator=(ImageSubresourceLayers const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSubresourceLayers &operator=(VkImageSubresourceLayers const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers &setAspectMask(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers &setMipLevel(uint32_t mipLevel_) VULKAN_HPP_NOEXCEPT { mipLevel = mipLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers &setBaseArrayLayer(uint32_t baseArrayLayer_) VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers &setLayerCount(uint32_t layerCount_) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(aspectMask, mipLevel, baseArrayLayer, layerCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageSubresourceLayers const &) const = default; #else bool operator==(ImageSubresourceLayers const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (aspectMask == rhs.aspectMask) && (mipLevel == rhs.mipLevel) && (baseArrayLayer == rhs.baseArrayLayer) && (layerCount == rhs.layerCount); # endif } bool operator!=(ImageSubresourceLayers const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; uint32_t mipLevel = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers) == sizeof(VkImageSubresourceLayers), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageSubresourceLayers is not nothrow_move_constructible!"); struct ImageBlit2 { using NativeType = VkImageBlit2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 ImageBlit2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array const &srcOffsets_ = {}, 
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const &dstOffsets_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcSubresource(srcSubresource_), srcOffsets(srcOffsets_), dstSubresource(dstSubresource_), dstOffsets(dstOffsets_) { } VULKAN_HPP_CONSTEXPR_14 ImageBlit2(ImageBlit2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageBlit2(VkImageBlit2 const &rhs) VULKAN_HPP_NOEXCEPT : ImageBlit2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageBlit2 &operator=(ImageBlit2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageBlit2 &operator=(VkImageBlit2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &setSrcSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &srcSubresource_) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &setSrcOffsets(std::array const &srcOffsets_) VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &setDstSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &dstSubresource_) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 &setDstOffsets(std::array const &dstOffsets_) VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageBlit2 const &) const = default; #else bool operator==(ImageBlit2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcSubresource == rhs.srcSubresource) && (srcOffsets == rhs.srcOffsets) && (dstSubresource == rhs.dstSubresource) && (dstOffsets == rhs.dstOffsets); # endif } bool operator!=(ImageBlit2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageBlit2) == sizeof(VkImageBlit2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageBlit2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageBlit2; }; using ImageBlit2KHR = ImageBlit2; struct BlitImageInfo2 { using NativeType = 
VkBlitImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2 *pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcImage(srcImage_), srcImageLayout(srcImageLayout_), dstImage(dstImage_), dstImageLayout(dstImageLayout_), regionCount(regionCount_), pRegions(pRegions_), filter(filter_) { } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2(BlitImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BlitImageInfo2(VkBlitImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : BlitImageInfo2(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BlitImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, const void *pNext_ = nullptr) : pNext(pNext_) , srcImage(srcImage_) , srcImageLayout(srcImageLayout_) , dstImage(dstImage_) , dstImageLayout(dstImageLayout_) , regionCount(static_cast(regions_.size())) , pRegions(regions_.data()) , filter(filter_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BlitImageInfo2 &operator=(BlitImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BlitImageInfo2 &operator=(VkBlitImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setSrcImage(VULKAN_HPP_NAMESPACE::Image srcImage_) VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setSrcImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_) VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setDstImage(VULKAN_HPP_NAMESPACE::Image dstImage_) VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setDstImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_) VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setRegionCount(uint32_t regionCount_) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 &setPRegions(const VULKAN_HPP_NAMESPACE::ImageBlit2 *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BlitImageInfo2 &setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_) VULKAN_HPP_NOEXCEPT { regionCount = static_cast(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 
&setFilter(VULKAN_HPP_NAMESPACE::Filter filter_) VULKAN_HPP_NOEXCEPT { filter = filter_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BlitImageInfo2 const &) const = default; #else bool operator==(BlitImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcImage == rhs.srcImage) && (srcImageLayout == rhs.srcImageLayout) && (dstImage == rhs.dstImage) && (dstImageLayout == rhs.dstImageLayout) && (regionCount == rhs.regionCount) && (pRegions == rhs.pRegions) && (filter == rhs.filter); # endif } bool operator!=(BlitImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image srcImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::Image dstImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; uint32_t regionCount = {}; const VULKAN_HPP_NAMESPACE::ImageBlit2 *pRegions = {}; VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BlitImageInfo2) == sizeof(VkBlitImageInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BlitImageInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = BlitImageInfo2; }; using BlitImageInfo2KHR = BlitImageInfo2; #if defined(VK_USE_PLATFORM_FUCHSIA) struct BufferCollectionBufferCreateInfoFUCHSIA { using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), collection(collection_), index(index_) { } VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA(BufferCollectionBufferCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionBufferCreateInfoFUCHSIA(VkBufferCollectionBufferCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : BufferCollectionBufferCreateInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionBufferCreateInfoFUCHSIA &operator=(BufferCollectionBufferCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionBufferCreateInfoFUCHSIA &operator=(VkBufferCollectionBufferCreateInfoFUCHSIA const &rhs) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setCollection(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_) VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA &setIndex(uint32_t index_) VULKAN_HPP_NOEXCEPT { index = index_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, collection, index); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferCollectionBufferCreateInfoFUCHSIA const &) const = default; # else bool operator==(BufferCollectionBufferCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (collection == rhs.collection) && (index == rhs.index); # endif } bool operator!=(BufferCollectionBufferCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; uint32_t index = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA) == sizeof(VkBufferCollectionBufferCreateInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferCollectionBufferCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct BufferCollectionConstraintsInfoFUCHSIA { using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA(uint32_t minBufferCount_ = {}, uint32_t maxBufferCount_ = {}, uint32_t minBufferCountForCamping_ = {}, uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minBufferCount(minBufferCount_), maxBufferCount(maxBufferCount_), minBufferCountForCamping(minBufferCountForCamping_), minBufferCountForDedicatedSlack(minBufferCountForDedicatedSlack_), minBufferCountForSharedSlack(minBufferCountForSharedSlack_) { } VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA(BufferCollectionConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; 
BufferCollectionConstraintsInfoFUCHSIA(VkBufferCollectionConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : BufferCollectionConstraintsInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionConstraintsInfoFUCHSIA &operator=(BufferCollectionConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionConstraintsInfoFUCHSIA &operator=(VkBufferCollectionConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &setMinBufferCount(uint32_t minBufferCount_) VULKAN_HPP_NOEXCEPT { minBufferCount = minBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &setMaxBufferCount(uint32_t maxBufferCount_) VULKAN_HPP_NOEXCEPT { maxBufferCount = maxBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA &setMinBufferCountForCamping(uint32_t minBufferCountForCamping_) VULKAN_HPP_NOEXCEPT { minBufferCountForCamping = minBufferCountForCamping_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForDedicatedSlack(uint32_t minBufferCountForDedicatedSlack_) VULKAN_HPP_NOEXCEPT { minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForSharedSlack(uint32_t minBufferCountForSharedSlack_) VULKAN_HPP_NOEXCEPT { minBufferCountForSharedSlack = minBufferCountForSharedSlack_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferCollectionConstraintsInfoFUCHSIA const &) const = default; # else bool operator==(BufferCollectionConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minBufferCount == rhs.minBufferCount) && (maxBufferCount == rhs.maxBufferCount) && (minBufferCountForCamping == rhs.minBufferCountForCamping) && (minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack) && (minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack); # endif } bool operator!=(BufferCollectionConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; const void *pNext = {}; uint32_t minBufferCount = {}; uint32_t maxBufferCount = {}; uint32_t minBufferCountForCamping = {}; uint32_t minBufferCountForDedicatedSlack = {}; uint32_t minBufferCountForSharedSlack = {}; }; 
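// --- Editorial note (not generated code): usage sketch for BufferCollectionConstraintsInfoFUCHSIA
// The struct above carries the buffer-count constraints a Fuchsia buffer collection must satisfy
// (VK_FUCHSIA_buffer_collection). It is normally embedded in the buffer/image constraints
// structures of that extension rather than passed to a command on its own. A minimal sketch,
// assuming the default `vk` namespace; the counts are placeholders:
//
//   vk::BufferCollectionConstraintsInfoFUCHSIA countConstraints =
//       vk::BufferCollectionConstraintsInfoFUCHSIA()
//           .setMinBufferCount( 2 )              // e.g. at least double-buffered
//           .setMaxBufferCount( 4 )
//           .setMinBufferCountForCamping( 1 );
//   // ... then assign it to the bufferCollectionConstraints member of the enclosing
//   // vk::BufferConstraintsInfoFUCHSIA / vk::ImageConstraintsInfoFUCHSIA before calling the
//   // corresponding set-buffer-collection-constraints command on the device.
// -------------------------------------------------------------------------------------------------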
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA) == sizeof(VkBufferCollectionConstraintsInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferCollectionConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct BufferCollectionCreateInfoFUCHSIA { using NativeType = VkBufferCollectionCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA(zx_handle_t collectionToken_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), collectionToken(collectionToken_) { } VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA(BufferCollectionCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionCreateInfoFUCHSIA(VkBufferCollectionCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : BufferCollectionCreateInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionCreateInfoFUCHSIA &operator=(BufferCollectionCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionCreateInfoFUCHSIA &operator=(VkBufferCollectionCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA &setCollectionToken(zx_handle_t collectionToken_) VULKAN_HPP_NOEXCEPT { collectionToken = collectionToken_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, collectionToken); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(BufferCollectionCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&collectionToken, &rhs.collectionToken, sizeof(zx_handle_t)); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(BufferCollectionCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&collectionToken, &rhs.collectionToken, sizeof(zx_handle_t)) == 0); } bool operator!=(BufferCollectionCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA; const void *pNext = {}; zx_handle_t collectionToken = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA) == sizeof(VkBufferCollectionCreateInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferCollectionCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct BufferCollectionImageCreateInfoFUCHSIA { using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), collection(collection_), index(index_) { } VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA(BufferCollectionImageCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionImageCreateInfoFUCHSIA(VkBufferCollectionImageCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : BufferCollectionImageCreateInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionImageCreateInfoFUCHSIA &operator=(BufferCollectionImageCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionImageCreateInfoFUCHSIA &operator=(VkBufferCollectionImageCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setCollection(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_) VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA &setIndex(uint32_t index_) VULKAN_HPP_NOEXCEPT { index = index_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, collection, index); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(BufferCollectionImageCreateInfoFUCHSIA const &) const = default; # else bool operator==(BufferCollectionImageCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (collection == rhs.collection) && (index == rhs.index); # endif } bool operator!=(BufferCollectionImageCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; uint32_t index = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA) == sizeof(VkBufferCollectionImageCreateInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferCollectionImageCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct SysmemColorSpaceFUCHSIA { using NativeType = VkSysmemColorSpaceFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA(uint32_t colorSpace_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), colorSpace(colorSpace_) { } VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA(SysmemColorSpaceFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; SysmemColorSpaceFUCHSIA(VkSysmemColorSpaceFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : SysmemColorSpaceFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SysmemColorSpaceFUCHSIA &operator=(SysmemColorSpaceFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; SysmemColorSpaceFUCHSIA &operator=(VkSysmemColorSpaceFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA &setColorSpace(uint32_t colorSpace_) VULKAN_HPP_NOEXCEPT { colorSpace = colorSpace_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, colorSpace); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SysmemColorSpaceFUCHSIA const &) const = default; # else bool operator==(SysmemColorSpaceFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (colorSpace == rhs.colorSpace); # endif } bool operator!=(SysmemColorSpaceFUCHSIA const &rhs) const 
VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA; const void *pNext = {}; uint32_t colorSpace = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA) == sizeof(VkSysmemColorSpaceFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = SysmemColorSpaceFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct BufferCollectionPropertiesFUCHSIA { using NativeType = VkBufferCollectionPropertiesFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, uint32_t bufferCount_ = {}, uint32_t createInfoIndex_ = {}, uint64_t sysmemPixelFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryTypeBits(memoryTypeBits_), bufferCount(bufferCount_), createInfoIndex(createInfoIndex_), sysmemPixelFormat(sysmemPixelFormat_), formatFeatures(formatFeatures_), sysmemColorSpaceIndex(sysmemColorSpaceIndex_), samplerYcbcrConversionComponents(samplerYcbcrConversionComponents_), suggestedYcbcrModel(suggestedYcbcrModel_), suggestedYcbcrRange(suggestedYcbcrRange_), suggestedXChromaOffset(suggestedXChromaOffset_), suggestedYChromaOffset(suggestedYChromaOffset_) { } VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA(BufferCollectionPropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionPropertiesFUCHSIA(VkBufferCollectionPropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : BufferCollectionPropertiesFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionPropertiesFUCHSIA &operator=(BufferCollectionPropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCollectionPropertiesFUCHSIA &operator=(VkBufferCollectionPropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &setMemoryTypeBits(uint32_t memoryTypeBits_) VULKAN_HPP_NOEXCEPT { memoryTypeBits = memoryTypeBits_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &setBufferCount(uint32_t bufferCount_) 
VULKAN_HPP_NOEXCEPT { bufferCount = bufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &setCreateInfoIndex(uint32_t createInfoIndex_) VULKAN_HPP_NOEXCEPT { createInfoIndex = createInfoIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA &setSysmemPixelFormat(uint64_t sysmemPixelFormat_) VULKAN_HPP_NOEXCEPT { sysmemPixelFormat = sysmemPixelFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setFormatFeatures(VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_) VULKAN_HPP_NOEXCEPT { formatFeatures = formatFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemColorSpaceIndex(VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const &sysmemColorSpaceIndex_) VULKAN_HPP_NOEXCEPT { sysmemColorSpaceIndex = sysmemColorSpaceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSamplerYcbcrConversionComponents(VULKAN_HPP_NAMESPACE::ComponentMapping const &samplerYcbcrConversionComponents_) VULKAN_HPP_NOEXCEPT { samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrModel(VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_) VULKAN_HPP_NOEXCEPT { suggestedYcbcrModel = suggestedYcbcrModel_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYcbcrRange(VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_) VULKAN_HPP_NOEXCEPT { suggestedYcbcrRange = suggestedYcbcrRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedXChromaOffset(VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_) VULKAN_HPP_NOEXCEPT { suggestedXChromaOffset = suggestedXChromaOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSuggestedYChromaOffset(VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_) VULKAN_HPP_NOEXCEPT { suggestedYChromaOffset = suggestedYChromaOffset_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryTypeBits, bufferCount, createInfoIndex, sysmemPixelFormat, formatFeatures, sysmemColorSpaceIndex, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferCollectionPropertiesFUCHSIA const &) const = default; # else bool operator==(BufferCollectionPropertiesFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryTypeBits == rhs.memoryTypeBits) && (bufferCount == rhs.bufferCount) && (createInfoIndex == rhs.createInfoIndex) && (sysmemPixelFormat == rhs.sysmemPixelFormat) && (formatFeatures == rhs.formatFeatures) && (sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex) && (samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents) && (suggestedYcbcrModel == 
rhs.suggestedYcbcrModel) && (suggestedYcbcrRange == rhs.suggestedYcbcrRange) && (suggestedXChromaOffset == rhs.suggestedXChromaOffset) && (suggestedYChromaOffset == rhs.suggestedYChromaOffset); # endif } bool operator!=(BufferCollectionPropertiesFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA; void *pNext = {}; uint32_t memoryTypeBits = {}; uint32_t bufferCount = {}; uint32_t createInfoIndex = {}; uint64_t sysmemPixelFormat = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {}; VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA) == sizeof(VkBufferCollectionPropertiesFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferCollectionPropertiesFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct BufferCreateInfo { using NativeType = VkBufferCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t *pQueueFamilyIndices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), size(size_), usage(usage_), sharingMode(sharingMode_), queueFamilyIndexCount(queueFamilyIndexCount_), pQueueFamilyIndices(pQueueFamilyIndices_) { } VULKAN_HPP_CONSTEXPR BufferCreateInfo(BufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCreateInfo(VkBufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : BufferCreateInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , size(size_) , usage(usage_) , sharingMode(sharingMode_) , queueFamilyIndexCount(static_cast(queueFamilyIndices_.size())) , pQueueFamilyIndices(queueFamilyIndices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCreateInfo 
&operator=(BufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCreateInfo &operator=(VkBufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setUsage(VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setSharingMode(VULKAN_HPP_NAMESPACE::SharingMode sharingMode_) VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setQueueFamilyIndexCount(uint32_t queueFamilyIndexCount_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo &setPQueueFamilyIndices(const uint32_t *pQueueFamilyIndices_) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) BufferCreateInfo &setQueueFamilyIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast(queueFamilyIndices_.size()); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferCreateInfo const &) const = default; #else bool operator==(BufferCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (size == rhs.size) && (usage == rhs.usage) && (sharingMode == rhs.sharingMode) && (queueFamilyIndexCount == rhs.queueFamilyIndexCount) && (pQueueFamilyIndices == rhs.pQueueFamilyIndices); # endif } bool operator!=(BufferCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t *pQueueFamilyIndices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCreateInfo) == sizeof(VkBufferCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard 
layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferCreateInfo; }; #if defined(VK_USE_PLATFORM_FUCHSIA) struct BufferConstraintsInfoFUCHSIA { using NativeType = VkBufferConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), createInfo(createInfo_), requiredFormatFeatures(requiredFormatFeatures_), bufferCollectionConstraints(bufferCollectionConstraints_) { } VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA(BufferConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferConstraintsInfoFUCHSIA(VkBufferConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : BufferConstraintsInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferConstraintsInfoFUCHSIA &operator=(BufferConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferConstraintsInfoFUCHSIA &operator=(VkBufferConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA &setCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateInfo const &createInfo_) VULKAN_HPP_NOEXCEPT { createInfo = createInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setRequiredFormatFeatures(VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_) VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints(VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &bufferCollectionConstraints_) VULKAN_HPP_NOEXCEPT { bufferCollectionConstraints = bufferCollectionConstraints_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferConstraintsInfoFUCHSIA const &) const = default; # else bool operator==(BufferConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (createInfo == rhs.createInfo) && (requiredFormatFeatures == rhs.requiredFormatFeatures) && (bufferCollectionConstraints == rhs.bufferCollectionConstraints); # endif } bool 
operator!=(BufferConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA) == sizeof(VkBufferConstraintsInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct BufferCopy { using NativeType = VkBufferCopy; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT : srcOffset(srcOffset_), dstOffset(dstOffset_), size(size_) { } VULKAN_HPP_CONSTEXPR BufferCopy(BufferCopy const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCopy(VkBufferCopy const &rhs) VULKAN_HPP_NOEXCEPT : BufferCopy(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCopy &operator=(BufferCopy const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferCopy &operator=(VkBufferCopy const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferCopy &setSrcOffset(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy &setDstOffset(VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_) VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(srcOffset, dstOffset, size); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferCopy const &) const = default; #else bool operator==(BufferCopy const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (srcOffset == rhs.srcOffset) && (dstOffset == rhs.dstOffset) && (size == rhs.size); # endif } bool operator!=(BufferCopy const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferCopy) == sizeof(VkBufferCopy), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
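  // Usage sketch (illustrative comment, not part of the generated registry output): recording a
  // buffer-to-buffer copy with the BufferCopy struct defined above. `commandBuffer`, `srcBuffer`
  // and `dstBuffer` are hypothetical handles; CommandBuffer::copyBuffer is the enhanced-mode
  // wrapper around vkCmdCopyBuffer and accepts a single region via ArrayProxy.
  //
  //   VULKAN_HPP_NAMESPACE::BufferCopy region{};
  //   region.setSrcOffset( 0 ).setDstOffset( 0 ).setSize( 256 );
  //   commandBuffer.copyBuffer( srcBuffer, dstBuffer, region );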
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "BufferCopy is not nothrow_move_constructible!" );

  struct BufferCopy2
  {
    using NativeType = VkBufferCopy2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
                                      VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast<BufferCopy2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { size = size_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkBufferCopy2 *>( this ); }
    explicit operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkBufferCopy2 *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcOffset, dstOffset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy2 const & ) const = default;
#else
    bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
# endif
    }

    bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferCopy2>
  {
    using Type = BufferCopy2;
  };

  using BufferCopy2KHR = BufferCopy2;

  struct BufferDeviceAddressCreateInfoEXT
  {
    using NativeType = VkBufferDeviceAddressCreateInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR
StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceAddress(deviceAddress_) { } VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(BufferDeviceAddressCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressCreateInfoEXT(VkBufferDeviceAddressCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : BufferDeviceAddressCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferDeviceAddressCreateInfoEXT &operator=(BufferDeviceAddressCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressCreateInfoEXT &operator=(VkBufferDeviceAddressCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT &setDeviceAddress(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceAddress); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferDeviceAddressCreateInfoEXT const &) const = default; #else bool operator==(BufferDeviceAddressCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceAddress == rhs.deviceAddress); # endif } bool operator!=(BufferDeviceAddressCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT) == sizeof(VkBufferDeviceAddressCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferDeviceAddressCreateInfoEXT; }; struct BufferDeviceAddressInfo { using NativeType = VkBufferDeviceAddressInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), buffer(buffer_) { } VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(BufferDeviceAddressInfo const 
&rhs) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressInfo(VkBufferDeviceAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT : BufferDeviceAddressInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferDeviceAddressInfo &operator=(BufferDeviceAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressInfo &operator=(VkBufferDeviceAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, buffer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferDeviceAddressInfo const &) const = default; #else bool operator==(BufferDeviceAddressInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (buffer == rhs.buffer); # endif } bool operator!=(BufferDeviceAddressInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo) == sizeof(VkBufferDeviceAddressInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferDeviceAddressInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferDeviceAddressInfo; }; using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; struct BufferImageCopy { using NativeType = VkBufferImageCopy; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT : bufferOffset(bufferOffset_), bufferRowLength(bufferRowLength_), bufferImageHeight(bufferImageHeight_), imageSubresource(imageSubresource_), imageOffset(imageOffset_), imageExtent(imageExtent_) { } VULKAN_HPP_CONSTEXPR BufferImageCopy(BufferImageCopy const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy(VkBufferImageCopy const &rhs) VULKAN_HPP_NOEXCEPT : BufferImageCopy(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferImageCopy &operator=(BufferImageCopy const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy &operator=(VkBufferImageCopy const &rhs) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &setBufferOffset(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_) VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &setBufferRowLength(uint32_t bufferRowLength_) VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &setBufferImageHeight(uint32_t bufferImageHeight_) VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &setImageSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &imageSubresource_) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &setImageOffset(VULKAN_HPP_NAMESPACE::Offset3D const &imageOffset_) VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy &setImageExtent(VULKAN_HPP_NAMESPACE::Extent3D const &imageExtent_) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferImageCopy const &) const = default; #else bool operator==(BufferImageCopy const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (bufferOffset == rhs.bufferOffset) && (bufferRowLength == rhs.bufferRowLength) && (bufferImageHeight == rhs.bufferImageHeight) && (imageSubresource == rhs.imageSubresource) && (imageOffset == rhs.imageOffset) && (imageExtent == rhs.imageExtent); # endif } bool operator!=(BufferImageCopy const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; uint32_t bufferRowLength = {}; uint32_t bufferImageHeight = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferImageCopy) == sizeof(VkBufferImageCopy), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferImageCopy is not nothrow_move_constructible!"); struct BufferImageCopy2 { using NativeType = VkBufferImageCopy2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferImageCopy2(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void *pNext_ = 
nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), bufferOffset(bufferOffset_), bufferRowLength(bufferRowLength_), bufferImageHeight(bufferImageHeight_), imageSubresource(imageSubresource_), imageOffset(imageOffset_), imageExtent(imageExtent_) { } VULKAN_HPP_CONSTEXPR BufferImageCopy2(BufferImageCopy2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy2(VkBufferImageCopy2 const &rhs) VULKAN_HPP_NOEXCEPT : BufferImageCopy2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferImageCopy2 &operator=(BufferImageCopy2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy2 &operator=(VkBufferImageCopy2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setBufferOffset(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_) VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setBufferRowLength(uint32_t bufferRowLength_) VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setBufferImageHeight(uint32_t bufferImageHeight_) VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setImageSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &imageSubresource_) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setImageOffset(VULKAN_HPP_NAMESPACE::Offset3D const &imageOffset_) VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 &setImageExtent(VULKAN_HPP_NAMESPACE::Extent3D const &imageExtent_) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferImageCopy2 const &) const = default; #else bool operator==(BufferImageCopy2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (bufferOffset == rhs.bufferOffset) && (bufferRowLength == rhs.bufferRowLength) && (bufferImageHeight == rhs.bufferImageHeight) && (imageSubresource == rhs.imageSubresource) && (imageOffset == rhs.imageOffset) && (imageExtent == rhs.imageExtent); # endif } bool operator!=(BufferImageCopy2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; uint32_t bufferRowLength = {}; uint32_t bufferImageHeight = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; 
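  // Usage sketch (illustrative comment, not part of the generated registry output): using the
  // BufferImageCopy2 struct defined above with the copy_commands2-style API. CopyBufferToImageInfo2
  // and CommandBuffer::copyBufferToImage2 are assumed to be provided elsewhere in the header
  // (Vulkan 1.3 / VK_KHR_copy_commands2); `commandBuffer`, `stagingBuffer`, `image`, `width` and
  // `height` are hypothetical.
  //
  //   VULKAN_HPP_NAMESPACE::BufferImageCopy2 region{};
  //   region.setBufferOffset( 0 )
  //         .setImageSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setImageExtent( { width, height, 1 } );
  //
  //   VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 copyInfo{};
  //   copyInfo.setSrcBuffer( stagingBuffer )
  //           .setDstImage( image )
  //           .setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal )
  //           .setRegions( region );
  //
  //   commandBuffer.copyBufferToImage2( copyInfo );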
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferImageCopy2) == sizeof(VkBufferImageCopy2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferImageCopy2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferImageCopy2; }; using BufferImageCopy2KHR = BufferImageCopy2; struct BufferMemoryBarrier { using NativeType = VkBufferMemoryBarrier; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcAccessMask(srcAccessMask_), dstAccessMask(dstAccessMask_), srcQueueFamilyIndex(srcQueueFamilyIndex_), dstQueueFamilyIndex(dstQueueFamilyIndex_), buffer(buffer_), offset(offset_), size(size_) { } VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(BufferMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier(VkBufferMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferMemoryBarrier &operator=(BufferMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier &operator=(VkBufferMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setSrcQueueFamilyIndex(uint32_t srcQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setDstQueueFamilyIndex(uint32_t dstQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferMemoryBarrier const &) const = default; #else bool operator==(BufferMemoryBarrier const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcAccessMask == rhs.srcAccessMask) && (dstAccessMask == rhs.dstAccessMask) && (srcQueueFamilyIndex == rhs.srcQueueFamilyIndex) && (dstQueueFamilyIndex == rhs.dstQueueFamilyIndex) && (buffer == rhs.buffer) && (offset == rhs.offset) && (size == rhs.size); # endif } bool operator!=(BufferMemoryBarrier const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferMemoryBarrier) == sizeof(VkBufferMemoryBarrier), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferMemoryBarrier is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferMemoryBarrier; }; struct BufferMemoryBarrier2 { using NativeType = VkBufferMemoryBarrier2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcStageMask(srcStageMask_), srcAccessMask(srcAccessMask_), dstStageMask(dstStageMask_), dstAccessMask(dstAccessMask_), srcQueueFamilyIndex(srcQueueFamilyIndex_), dstQueueFamilyIndex(dstQueueFamilyIndex_), buffer(buffer_), offset(offset_), size(size_) { } VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2(BufferMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier2(VkBufferMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferMemoryBarrier2 &operator=(BufferMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier2 &operator=(VkBufferMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setSrcStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_) VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setDstStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_) VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setSrcQueueFamilyIndex(uint32_t srcQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setDstQueueFamilyIndex(uint32_t dstQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferMemoryBarrier2 const &) const = default; #else bool operator==(BufferMemoryBarrier2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcStageMask == rhs.srcStageMask) && (srcAccessMask == rhs.srcAccessMask) && (dstStageMask == rhs.dstStageMask) && (dstAccessMask == rhs.dstAccessMask) && (srcQueueFamilyIndex == rhs.srcQueueFamilyIndex) && (dstQueueFamilyIndex == rhs.dstQueueFamilyIndex) && (buffer == rhs.buffer) && (offset == rhs.offset) && (size == rhs.size); # endif } bool operator!=(BufferMemoryBarrier2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2) == 
sizeof(VkBufferMemoryBarrier2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferMemoryBarrier2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferMemoryBarrier2; }; using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; struct BufferMemoryRequirementsInfo2 { using NativeType = VkBufferMemoryRequirementsInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), buffer(buffer_) { } VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(BufferMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferMemoryRequirementsInfo2(VkBufferMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : BufferMemoryRequirementsInfo2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferMemoryRequirementsInfo2 &operator=(BufferMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferMemoryRequirementsInfo2 &operator=(VkBufferMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, buffer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferMemoryRequirementsInfo2 const &) const = default; #else bool operator==(BufferMemoryRequirementsInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (buffer == rhs.buffer); # endif } bool operator!=(BufferMemoryRequirementsInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2) == sizeof(VkBufferMemoryRequirementsInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferMemoryRequirementsInfo2; }; using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; struct 
BufferOpaqueCaptureAddressCreateInfo
  {
    using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , opaqueCaptureAddress(opaqueCaptureAddress_)
    {
    }

    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(BufferOpaqueCaptureAddressCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    BufferOpaqueCaptureAddressCreateInfo(VkBufferOpaqueCaptureAddressCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT
      : BufferOpaqueCaptureAddressCreateInfo(*reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BufferOpaqueCaptureAddressCreateInfo &operator=(BufferOpaqueCaptureAddressCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    BufferOpaqueCaptureAddressCreateInfo &operator=(VkBufferOpaqueCaptureAddressCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo &setOpaqueCaptureAddress(uint64_t opaqueCaptureAddress_) VULKAN_HPP_NOEXCEPT
    {
      opaqueCaptureAddress = opaqueCaptureAddress_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>(this);
    }

    explicit operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void *const &, uint64_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, opaqueCaptureAddress);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(BufferOpaqueCaptureAddressCreateInfo const &) const = default;
#else
    bool operator==(BufferOpaqueCaptureAddressCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (opaqueCaptureAddress == rhs.opaqueCaptureAddress);
# endif
    }

    bool operator!=(BufferOpaqueCaptureAddressCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
    const void *pNext = {};
    uint64_t opaqueCaptureAddress = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo) == sizeof(VkBufferOpaqueCaptureAddressCreateInfo),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
                           "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
  {
    using Type = BufferOpaqueCaptureAddressCreateInfo;
  };

  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;

  struct BufferViewCreateInfo
  {
    using NativeType = VkBufferViewCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType
structureType = StructureType::eBufferViewCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), buffer(buffer_), format(format_), offset(offset_), range(range_) { } VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(BufferViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferViewCreateInfo(VkBufferViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : BufferViewCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferViewCreateInfo &operator=(BufferViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; BufferViewCreateInfo &operator=(VkBufferViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &setRange(VULKAN_HPP_NAMESPACE::DeviceSize range_) VULKAN_HPP_NOEXCEPT { range = range_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, buffer, format, offset, range); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(BufferViewCreateInfo const &) const = default; #else bool operator==(BufferViewCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (buffer == rhs.buffer) && (format == rhs.format) && (offset == rhs.offset) && (range == rhs.range); # endif } bool operator!=(BufferViewCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize range = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::BufferViewCreateInfo) == 
sizeof(VkBufferViewCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "BufferViewCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = BufferViewCreateInfo; }; struct CalibratedTimestampInfoEXT { using NativeType = VkCalibratedTimestampInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), timeDomain(timeDomain_) { } VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(CalibratedTimestampInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; CalibratedTimestampInfoEXT(VkCalibratedTimestampInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : CalibratedTimestampInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CalibratedTimestampInfoEXT &operator=(CalibratedTimestampInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; CalibratedTimestampInfoEXT &operator=(VkCalibratedTimestampInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT &setTimeDomain(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_) VULKAN_HPP_NOEXCEPT { timeDomain = timeDomain_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, timeDomain); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CalibratedTimestampInfoEXT const &) const = default; #else bool operator==(CalibratedTimestampInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (timeDomain == rhs.timeDomain); # endif } bool operator!=(CalibratedTimestampInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT) == sizeof(VkCalibratedTimestampInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CalibratedTimestampInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = CalibratedTimestampInfoEXT; }; struct CheckpointData2NV { using NativeType = VkCheckpointData2NV; static 
const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CheckpointData2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void *pCheckpointMarker_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stage(stage_), pCheckpointMarker(pCheckpointMarker_) { } VULKAN_HPP_CONSTEXPR CheckpointData2NV(CheckpointData2NV const &rhs) VULKAN_HPP_NOEXCEPT = default; CheckpointData2NV(VkCheckpointData2NV const &rhs) VULKAN_HPP_NOEXCEPT : CheckpointData2NV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CheckpointData2NV &operator=(CheckpointData2NV const &rhs) VULKAN_HPP_NOEXCEPT = default; CheckpointData2NV &operator=(VkCheckpointData2NV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stage, pCheckpointMarker); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CheckpointData2NV const &) const = default; #else bool operator==(CheckpointData2NV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stage == rhs.stage) && (pCheckpointMarker == rhs.pCheckpointMarker); # endif } bool operator!=(CheckpointData2NV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {}; void *pCheckpointMarker = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CheckpointData2NV) == sizeof(VkCheckpointData2NV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CheckpointData2NV is not nothrow_move_constructible!"); template<> struct CppType { using Type = CheckpointData2NV; }; struct CheckpointDataNV { using NativeType = VkCheckpointDataNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void *pCheckpointMarker_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stage(stage_), pCheckpointMarker(pCheckpointMarker_) { } VULKAN_HPP_CONSTEXPR CheckpointDataNV(CheckpointDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CheckpointDataNV(VkCheckpointDataNV const &rhs) VULKAN_HPP_NOEXCEPT : CheckpointDataNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CheckpointDataNV &operator=(CheckpointDataNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CheckpointDataNV &operator=(VkCheckpointDataNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkCheckpointDataNV const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stage, pCheckpointMarker); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CheckpointDataNV const &) const = default; #else bool operator==(CheckpointDataNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stage == rhs.stage) && (pCheckpointMarker == rhs.pCheckpointMarker); # endif } bool operator!=(CheckpointDataNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; void *pCheckpointMarker = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CheckpointDataNV) == sizeof(VkCheckpointDataNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CheckpointDataNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = CheckpointDataNV; }; union ClearColorValue { using NativeType = VkClearColorValue; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 ClearColorValue(const std::array &float32_ = {}) : float32(float32_) {} VULKAN_HPP_CONSTEXPR_14 ClearColorValue(const std::array &int32_) : int32(int32_) {} VULKAN_HPP_CONSTEXPR_14 ClearColorValue(const std::array &uint32_) : uint32(uint32_) {} #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 ClearColorValue &setFloat32(std::array float32_) VULKAN_HPP_NOEXCEPT { float32 = float32_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearColorValue &setInt32(std::array int32_) VULKAN_HPP_NOEXCEPT { int32 = int32_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearColorValue &setUint32(std::array uint32_) VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return *this; } #endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkClearColorValue const &() const { return *reinterpret_cast(this); } operator VkClearColorValue &() { return *reinterpret_cast(this); } VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; }; struct ClearDepthStencilValue { using NativeType = VkClearDepthStencilValue; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT : depth(depth_), stencil(stencil_) { } VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(ClearDepthStencilValue const &rhs) VULKAN_HPP_NOEXCEPT = default; ClearDepthStencilValue(VkClearDepthStencilValue const &rhs) VULKAN_HPP_NOEXCEPT : ClearDepthStencilValue(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ClearDepthStencilValue &operator=(ClearDepthStencilValue const &rhs) VULKAN_HPP_NOEXCEPT = default; ClearDepthStencilValue &operator=(VkClearDepthStencilValue const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue &setDepth(float depth_) VULKAN_HPP_NOEXCEPT { depth = depth_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue &setStencil(uint32_t stencil_) VULKAN_HPP_NOEXCEPT { stencil = stencil_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(depth, stencil); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ClearDepthStencilValue const &) const = default; #else bool operator==(ClearDepthStencilValue const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (depth == rhs.depth) && (stencil == rhs.stencil); # endif } bool operator!=(ClearDepthStencilValue const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: float depth = {}; uint32_t stencil = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ClearDepthStencilValue) == sizeof(VkClearDepthStencilValue), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ClearDepthStencilValue is not nothrow_move_constructible!"); union ClearValue { using NativeType = VkClearValue; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 ClearValue(VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {}) : color(color_) {} VULKAN_HPP_CONSTEXPR_14 ClearValue(VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_) : depthStencil(depthStencil_) {} #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 ClearValue &setColor(VULKAN_HPP_NAMESPACE::ClearColorValue const &color_) VULKAN_HPP_NOEXCEPT { color = color_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearValue &setDepthStencil(VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const &depthStencil_) VULKAN_HPP_NOEXCEPT { depthStencil = depthStencil_; return *this; } #endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkClearValue const &() const { return *reinterpret_cast(this); } operator VkClearValue &() { return *reinterpret_cast(this); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::ClearColorValue color; VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil; #else VkClearColorValue color; VkClearDepthStencilValue depthStencil; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct ClearAttachment { using NativeType = VkClearAttachment; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT : aspectMask(aspectMask_), colorAttachment(colorAttachment_), clearValue(clearValue_) { } VULKAN_HPP_CONSTEXPR_14 ClearAttachment(ClearAttachment const &rhs) VULKAN_HPP_NOEXCEPT = default; ClearAttachment(VkClearAttachment const &rhs) VULKAN_HPP_NOEXCEPT : ClearAttachment(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ClearAttachment &operator=(ClearAttachment const &rhs) 
VULKAN_HPP_NOEXCEPT = default; ClearAttachment &operator=(VkClearAttachment const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ClearAttachment &setAspectMask(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearAttachment &setColorAttachment(uint32_t colorAttachment_) VULKAN_HPP_NOEXCEPT { colorAttachment = colorAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearAttachment &setClearValue(VULKAN_HPP_NAMESPACE::ClearValue const &clearValue_) VULKAN_HPP_NOEXCEPT { clearValue = clearValue_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(aspectMask, colorAttachment, clearValue); } #endif public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; uint32_t colorAttachment = {}; VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ClearAttachment) == sizeof(VkClearAttachment), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ClearAttachment is not nothrow_move_constructible!"); struct ClearRect { using NativeType = VkClearRect; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT : rect(rect_), baseArrayLayer(baseArrayLayer_), layerCount(layerCount_) { } VULKAN_HPP_CONSTEXPR ClearRect(ClearRect const &rhs) VULKAN_HPP_NOEXCEPT = default; ClearRect(VkClearRect const &rhs) VULKAN_HPP_NOEXCEPT : ClearRect(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ClearRect &operator=(ClearRect const &rhs) VULKAN_HPP_NOEXCEPT = default; ClearRect &operator=(VkClearRect const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ClearRect &setRect(VULKAN_HPP_NAMESPACE::Rect2D const &rect_) VULKAN_HPP_NOEXCEPT { rect = rect_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearRect &setBaseArrayLayer(uint32_t baseArrayLayer_) VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearRect &setLayerCount(uint32_t layerCount_) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkClearRect &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(rect, baseArrayLayer, layerCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ClearRect const &) const = default; #else bool operator==(ClearRect const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == 
rhs.reflect(); # else return (rect == rhs.rect) && (baseArrayLayer == rhs.baseArrayLayer) && (layerCount == rhs.layerCount); # endif } bool operator!=(ClearRect const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Rect2D rect = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ClearRect) == sizeof(VkClearRect), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ClearRect is not nothrow_move_constructible!"); struct CoarseSampleLocationNV { using NativeType = VkCoarseSampleLocationNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT : pixelX(pixelX_), pixelY(pixelY_), sample(sample_) { } VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(CoarseSampleLocationNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CoarseSampleLocationNV(VkCoarseSampleLocationNV const &rhs) VULKAN_HPP_NOEXCEPT : CoarseSampleLocationNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CoarseSampleLocationNV &operator=(CoarseSampleLocationNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CoarseSampleLocationNV &operator=(VkCoarseSampleLocationNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV &setPixelX(uint32_t pixelX_) VULKAN_HPP_NOEXCEPT { pixelX = pixelX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV &setPixelY(uint32_t pixelY_) VULKAN_HPP_NOEXCEPT { pixelY = pixelY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV &setSample(uint32_t sample_) VULKAN_HPP_NOEXCEPT { sample = sample_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(pixelX, pixelY, sample); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CoarseSampleLocationNV const &) const = default; #else bool operator==(CoarseSampleLocationNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (pixelX == rhs.pixelX) && (pixelY == rhs.pixelY) && (sample == rhs.sample); # endif } bool operator!=(CoarseSampleLocationNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t pixelX = {}; uint32_t pixelY = {}; uint32_t sample = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV) == sizeof(VkCoarseSampleLocationNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CoarseSampleLocationNV is not nothrow_move_constructible!"); struct CoarseSampleOrderCustomNV { using NativeType = VkCoarseSampleOrderCustomNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV *pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT : shadingRate(shadingRate_), sampleCount(sampleCount_), sampleLocationCount(sampleLocationCount_), pSampleLocations(pSampleLocations_) { } VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(CoarseSampleOrderCustomNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CoarseSampleOrderCustomNV(VkCoarseSampleOrderCustomNV const &rhs) VULKAN_HPP_NOEXCEPT : CoarseSampleOrderCustomNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &sampleLocations_) : shadingRate(shadingRate_) , sampleCount(sampleCount_) , sampleLocationCount(static_cast(sampleLocations_.size())) , pSampleLocations(sampleLocations_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CoarseSampleOrderCustomNV &operator=(CoarseSampleOrderCustomNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CoarseSampleOrderCustomNV &operator=(VkCoarseSampleOrderCustomNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &setShadingRate(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_) VULKAN_HPP_NOEXCEPT { shadingRate = shadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &setSampleCount(uint32_t sampleCount_) VULKAN_HPP_NOEXCEPT { sampleCount = sampleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &setSampleLocationCount(uint32_t sampleLocationCount_) VULKAN_HPP_NOEXCEPT { sampleLocationCount = sampleLocationCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setPSampleLocations(const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV *pSampleLocations_) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CoarseSampleOrderCustomNV &setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &sampleLocations_) VULKAN_HPP_NOEXCEPT { sampleLocationCount = static_cast(sampleLocations_.size()); pSampleLocations = sampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(shadingRate, sampleCount, sampleLocationCount, pSampleLocations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CoarseSampleOrderCustomNV const &) const = default; #else bool operator==(CoarseSampleOrderCustomNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (shadingRate == rhs.shadingRate) && (sampleCount == rhs.sampleCount) && (sampleLocationCount == rhs.sampleLocationCount) && (pSampleLocations == 
rhs.pSampleLocations); # endif } bool operator!=(CoarseSampleOrderCustomNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; uint32_t sampleCount = {}; uint32_t sampleLocationCount = {}; const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV *pSampleLocations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV) == sizeof(VkCoarseSampleOrderCustomNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CoarseSampleOrderCustomNV is not nothrow_move_constructible!"); struct CommandBufferAllocateInfo { using NativeType = VkCommandBufferAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), commandPool(commandPool_), level(level_), commandBufferCount(commandBufferCount_) { } VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(CommandBufferAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferAllocateInfo(VkCommandBufferAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferAllocateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferAllocateInfo &operator=(CommandBufferAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferAllocateInfo &operator=(VkCommandBufferAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &setCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool_) VULKAN_HPP_NOEXCEPT { commandPool = commandPool_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &setLevel(VULKAN_HPP_NAMESPACE::CommandBufferLevel level_) VULKAN_HPP_NOEXCEPT { level = level_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &setCommandBufferCount(uint32_t commandBufferCount_) VULKAN_HPP_NOEXCEPT { commandBufferCount = commandBufferCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, commandPool, level, commandBufferCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CommandBufferAllocateInfo const &) const = default; #else bool operator==(CommandBufferAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == 
rhs.sType) && (pNext == rhs.pNext) && (commandPool == rhs.commandPool) && (level == rhs.level) && (commandBufferCount == rhs.commandBufferCount); # endif } bool operator!=(CommandBufferAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; uint32_t commandBufferCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo) == sizeof(VkCommandBufferAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CommandBufferAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = CommandBufferAllocateInfo; }; struct CommandBufferInheritanceInfo { using NativeType = VkCommandBufferInheritanceInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), renderPass(renderPass_), subpass(subpass_), framebuffer(framebuffer_), occlusionQueryEnable(occlusionQueryEnable_), queryFlags(queryFlags_), pipelineStatistics(pipelineStatistics_) { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(CommandBufferInheritanceInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceInfo(VkCommandBufferInheritanceInfo const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferInheritanceInfo &operator=(CommandBufferInheritanceInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceInfo &operator=(VkCommandBufferInheritanceInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &setRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass_) VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &setSubpass(uint32_t subpass_) VULKAN_HPP_NOEXCEPT { subpass = subpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &setFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_) VULKAN_HPP_NOEXCEPT { framebuffer = framebuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &setOcclusionQueryEnable(VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_) VULKAN_HPP_NOEXCEPT { occlusionQueryEnable = occlusionQueryEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &setQueryFlags(VULKAN_HPP_NAMESPACE::QueryControlFlags 
queryFlags_) VULKAN_HPP_NOEXCEPT { queryFlags = queryFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPipelineStatistics(VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_) VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CommandBufferInheritanceInfo const &) const = default; #else bool operator==(CommandBufferInheritanceInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (renderPass == rhs.renderPass) && (subpass == rhs.subpass) && (framebuffer == rhs.framebuffer) && (occlusionQueryEnable == rhs.occlusionQueryEnable) && (queryFlags == rhs.queryFlags) && (pipelineStatistics == rhs.pipelineStatistics); # endif } bool operator!=(CommandBufferInheritanceInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; uint32_t subpass = {}; VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo) == sizeof(VkCommandBufferInheritanceInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CommandBufferInheritanceInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = CommandBufferInheritanceInfo; }; struct CommandBufferBeginInfo { using NativeType = VkCommandBufferBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo *pInheritanceInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pInheritanceInfo(pInheritanceInfo_) { } VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(CommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferBeginInfo(VkCommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferBeginInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferBeginInfo &operator=(CommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferBeginInfo &operator=(VkCommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &setFlags(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo(const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo *pInheritanceInfo_) VULKAN_HPP_NOEXCEPT { pInheritanceInfo = pInheritanceInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pInheritanceInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CommandBufferBeginInfo const &) const = default; #else bool operator==(CommandBufferBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pInheritanceInfo == rhs.pInheritanceInfo); # endif } bool operator!=(CommandBufferBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo *pInheritanceInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo) == sizeof(VkCommandBufferBeginInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CommandBufferBeginInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = CommandBufferBeginInfo; }; struct CommandBufferInheritanceConditionalRenderingInfoEXT { using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), conditionalRenderingEnable(conditionalRenderingEnable_) { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(CommandBufferInheritanceConditionalRenderingInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceConditionalRenderingInfoEXT(VkCommandBufferInheritanceConditionalRenderingInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceConditionalRenderingInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferInheritanceConditionalRenderingInfoEXT & operator=(CommandBufferInheritanceConditionalRenderingInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; 
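    // Usage sketch (illustration only, not generated code): CommandBufferBeginInfo, defined
    // just above, is what a command-buffer begin call consumes; pInheritanceInfo is only
    // consulted for secondary command buffers, and extension structs such as this
    // conditional-rendering one are chained into CommandBufferInheritanceInfo::pNext.
    // The usage flag and variable names below are assumptions for the example.
    //
    //   VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo beginInfo =
    //     VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo{}
    //       .setFlags(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits::eOneTimeSubmit)
    //       .setPInheritanceInfo(&inheritanceInfo);   // hypothetical CommandBufferInheritanceInfo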
    CommandBufferInheritanceConditionalRenderingInfoEXT &operator=(VkCommandBufferInheritanceConditionalRenderingInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &
      setConditionalRenderingEnable(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_) VULKAN_HPP_NOEXCEPT
    {
      conditionalRenderingEnable = conditionalRenderingEnable_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(this);
    }

    explicit operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void *const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, conditionalRenderingEnable);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(CommandBufferInheritanceConditionalRenderingInfoEXT const &) const = default;
#else
    bool operator==(CommandBufferInheritanceConditionalRenderingInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (conditionalRenderingEnable == rhs.conditionalRenderingEnable);
# endif
    }

    bool operator!=(CommandBufferInheritanceConditionalRenderingInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT) ==
                             sizeof(VkCommandBufferInheritanceConditionalRenderingInfoEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
                           "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
  {
    using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
  };

  struct CommandBufferInheritanceRenderPassTransformInfoQCOM
  {
    using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
      VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
      void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , transform(transform_)
      , renderArea(renderArea_)
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(CommandBufferInheritanceRenderPassTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default;
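    // Usage sketch (illustration only, not generated code): like the other
    // CommandBufferInheritance* structs in this section, this QCOM structure is intended to
    // be chained into CommandBufferInheritanceInfo::pNext so that a secondary command buffer
    // inherits the render pass transform. The transform value and variable names below are
    // assumptions for the example.
    //
    //   VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM transformInfo =
    //     VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM{}
    //       .setTransform(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eRotate90)
    //       .setRenderArea(renderArea);              // hypothetical Rect2D
    //   inheritanceInfo.setPNext(&transformInfo);    // hypothetical CommandBufferInheritanceInfo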
CommandBufferInheritanceRenderPassTransformInfoQCOM(VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceRenderPassTransformInfoQCOM(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=(CommandBufferInheritanceRenderPassTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceRenderPassTransformInfoQCOM &operator=(VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea(VULKAN_HPP_NAMESPACE::Rect2D const &renderArea_) VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, transform, renderArea); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CommandBufferInheritanceRenderPassTransformInfoQCOM const &) const = default; #else bool operator==(CommandBufferInheritanceRenderPassTransformInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (transform == rhs.transform) && (renderArea == rhs.renderArea); # endif } bool operator!=(CommandBufferInheritanceRenderPassTransformInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; void *pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM) == sizeof(VkCommandBufferInheritanceRenderPassTransformInfoQCOM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!"); template<> struct CppType { using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; }; struct CommandBufferInheritanceRenderingInfo { using NativeType = VkCommandBufferInheritanceRenderingInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format *pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), viewMask(viewMask_), colorAttachmentCount(colorAttachmentCount_), pColorAttachmentFormats(pColorAttachmentFormats_), depthAttachmentFormat(depthAttachmentFormat_), stencilAttachmentFormat(stencilAttachmentFormat_), rasterizationSamples(rasterizationSamples_) { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo(CommandBufferInheritanceRenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceRenderingInfo(VkCommandBufferInheritanceRenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceRenderingInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CommandBufferInheritanceRenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , viewMask(viewMask_) , colorAttachmentCount(static_cast(colorAttachmentFormats_.size())) , pColorAttachmentFormats(colorAttachmentFormats_.data()) , depthAttachmentFormat(depthAttachmentFormat_) , stencilAttachmentFormat(stencilAttachmentFormat_) , rasterizationSamples(rasterizationSamples_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferInheritanceRenderingInfo &operator=(CommandBufferInheritanceRenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceRenderingInfo &operator=(VkCommandBufferInheritanceRenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &setFlags(VULKAN_HPP_NAMESPACE::RenderingFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &setViewMask(uint32_t viewMask_) VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo &setColorAttachmentCount(uint32_t colorAttachmentCount_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPColorAttachmentFormats(const VULKAN_HPP_NAMESPACE::Format *pColorAttachmentFormats_) VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) 
    CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount    = static_cast<uint32_t>( colorAttachmentFormats_.size() );
      pColorAttachmentFormats = colorAttachmentFormats_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      depthAttachmentFormat = depthAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilAttachmentFormat = stencilAttachmentFormat_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationSamples = rasterizationSamples_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceRenderingInfo *>( this );
    }

    explicit operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceRenderingInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::RenderingFlags const &,
               uint32_t const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::Format * const &,
               VULKAN_HPP_NAMESPACE::Format const &,
               VULKAN_HPP_NAMESPACE::Format const &,
               VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(
        sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default;
#else
    bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) &&
             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) &&
             ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) &&
             ( rasterizationSamples == rhs.rasterizationSamples );
#  endif
    }

    bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType                   = StructureType::eCommandBufferInheritanceRenderingInfo;
    const void *                              pNext                   = {};
    VULKAN_HPP_NAMESPACE::RenderingFlags      flags                   = {};
    uint32_t                                  viewMask                = {};
    uint32_t                                  colorAttachmentCount    = {};
    const VULKAN_HPP_NAMESPACE::Format *      pColorAttachmentFormats = {};
    VULKAN_HPP_NAMESPACE::Format              depthAttachmentFormat   = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::Format              stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples    = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
                            "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
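  // Illustrative usage sketch (not part of the generated interface): chaining a
  // CommandBufferInheritanceRenderingInfo into a CommandBufferInheritanceInfo so that a
  // secondary command buffer can be recorded for use inside a dynamic rendering pass.
  // The "vk" namespace alias, the colorFormats variable, and the surrounding setup are
  // assumptions; default constructors and the enhanced-mode ArrayProxy setters must be enabled.
  //
  //   std::array<vk::Format, 1> colorFormats = { vk::Format::eB8G8R8A8Unorm };
  //   vk::CommandBufferInheritanceRenderingInfo renderingInheritance =
  //     vk::CommandBufferInheritanceRenderingInfo()
  //       .setColorAttachmentFormats( colorFormats )
  //       .setDepthAttachmentFormat( vk::Format::eD32Sfloat )
  //       .setRasterizationSamples( vk::SampleCountFlagBits::e1 );
  //   vk::CommandBufferInheritanceInfo inheritanceInfo;
  //   inheritanceInfo.setPNext( &renderingInheritance );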
  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderingInfo>
  {
    using Type = CommandBufferInheritanceRenderingInfo;
  };

  using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;

  struct Viewport
  {
    using NativeType = VkViewport;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , width( width_ )
      , height( height_ )
      , minDepth( minDepth_ )
      , maxDepth( maxDepth_ )
    {
    }

    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT { x = x_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT { y = y_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT { width = width_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT { height = height_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT { minDepth = minDepth_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT { maxDepth = maxDepth_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkViewport const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkViewport *>( this ); }

    explicit operator VkViewport &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkViewport *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( x, y, width, height, minDepth, maxDepth );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Viewport const & ) const = default;
#else
    bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) &&
             ( maxDepth == rhs.maxDepth );
#  endif
    }

    bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    float x        = {};
    float y        = {};
    float width    = {};
    float height   = {};
    float minDepth = {};
    float maxDepth = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value, "Viewport is not nothrow_move_constructible!" );

  struct CommandBufferInheritanceViewportScissorInfoNV
  {
    using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_  = {},
                                                                        uint32_t                     viewportDepthCount_ = {},
                                                                        const
VULKAN_HPP_NAMESPACE::Viewport *pViewportDepths_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), viewportScissor2D(viewportScissor2D_), viewportDepthCount(viewportDepthCount_), pViewportDepths(pViewportDepths_) { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(CommandBufferInheritanceViewportScissorInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceViewportScissorInfoNV(VkCommandBufferInheritanceViewportScissorInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceViewportScissorInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandBufferInheritanceViewportScissorInfoNV &operator=(CommandBufferInheritanceViewportScissorInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceViewportScissorInfoNV &operator=(VkCommandBufferInheritanceViewportScissorInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportScissor2D(VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_) VULKAN_HPP_NOEXCEPT { viewportScissor2D = viewportScissor2D_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &setViewportDepthCount(uint32_t viewportDepthCount_) VULKAN_HPP_NOEXCEPT { viewportDepthCount = viewportDepthCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPViewportDepths(const VULKAN_HPP_NAMESPACE::Viewport *pViewportDepths_) VULKAN_HPP_NOEXCEPT { pViewportDepths = pViewportDepths_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CommandBufferInheritanceViewportScissorInfoNV const &) const = default; #else bool operator==(CommandBufferInheritanceViewportScissorInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (viewportScissor2D == rhs.viewportScissor2D) && (viewportDepthCount == rhs.viewportDepthCount) && (pViewportDepths == rhs.pViewportDepths); # endif } bool operator!=(CommandBufferInheritanceViewportScissorInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; uint32_t viewportDepthCount = {}; const VULKAN_HPP_NAMESPACE::Viewport *pViewportDepths = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV) == sizeof(VkCommandBufferInheritanceViewportScissorInfoNV), "struct and wrapper have different size!"); 
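  // Illustrative usage sketch (not part of the generated interface): a full-framebuffer
  // Viewport, reused as the viewport/depth array that a secondary command buffer inherits
  // via VK_NV_inherited_viewport_scissor. The extent values and the "vk" namespace alias
  // are assumptions; default constructors and setters must be enabled.
  //
  //   vk::Viewport viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f );
  //   vk::CommandBufferInheritanceViewportScissorInfoNV viewportScissorInheritance =
  //     vk::CommandBufferInheritanceViewportScissorInfoNV()
  //       .setViewportScissor2D( VK_TRUE )
  //       .setViewportDepthCount( 1 )
  //       .setPViewportDepths( &viewport );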
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
                            "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
  {
    using Type = CommandBufferInheritanceViewportScissorInfoNV;
  };

  struct CommandBufferSubmitInfo
  {
    using NativeType = VkCommandBufferSubmitInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {},
                                                  uint32_t                            deviceMask_    = {},
                                                  const void *                        pNext_         = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , commandBuffer( commandBuffer_ )
      , deviceMask( deviceMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferSubmitInfo( *reinterpret_cast<CommandBufferSubmitInfo const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBuffer = commandBuffer_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkCommandBufferSubmitInfo *>( this ); }

    explicit operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkCommandBufferSubmitInfo *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CommandBuffer const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, commandBuffer, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferSubmitInfo const & ) const = default;
#else
    bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask );
#  endif
    }

    bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eCommandBufferSubmitInfo;
    const void *                        pNext         = {};
    VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
    uint32_t                            deviceMask    = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value,
                            "CommandBufferSubmitInfo is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfo>
{ using Type = CommandBufferSubmitInfo; }; using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; struct CommandPoolCreateInfo { using NativeType = VkCommandPoolCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), queueFamilyIndex(queueFamilyIndex_) { } VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(CommandPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandPoolCreateInfo(VkCommandPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : CommandPoolCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CommandPoolCreateInfo &operator=(CommandPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; CommandPoolCreateInfo &operator=(VkCommandPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &setQueueFamilyIndex(uint32_t queueFamilyIndex_) VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, queueFamilyIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CommandPoolCreateInfo const &) const = default; #else bool operator==(CommandPoolCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (queueFamilyIndex == rhs.queueFamilyIndex); # endif } bool operator!=(CommandPoolCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo) == sizeof(VkCommandPoolCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CommandPoolCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = CommandPoolCreateInfo; }; struct SpecializationMapEntry { using NativeType = VkSpecializationMapEntry; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) 
VULKAN_HPP_NOEXCEPT : constantID(constantID_), offset(offset_), size(size_) { } VULKAN_HPP_CONSTEXPR SpecializationMapEntry(SpecializationMapEntry const &rhs) VULKAN_HPP_NOEXCEPT = default; SpecializationMapEntry(VkSpecializationMapEntry const &rhs) VULKAN_HPP_NOEXCEPT : SpecializationMapEntry(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SpecializationMapEntry &operator=(SpecializationMapEntry const &rhs) VULKAN_HPP_NOEXCEPT = default; SpecializationMapEntry &operator=(VkSpecializationMapEntry const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry &setConstantID(uint32_t constantID_) VULKAN_HPP_NOEXCEPT { constantID = constantID_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry &setOffset(uint32_t offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry &setSize(size_t size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(constantID, offset, size); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SpecializationMapEntry const &) const = default; #else bool operator==(SpecializationMapEntry const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (constantID == rhs.constantID) && (offset == rhs.offset) && (size == rhs.size); # endif } bool operator!=(SpecializationMapEntry const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t constantID = {}; uint32_t offset = {}; size_t size = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SpecializationMapEntry) == sizeof(VkSpecializationMapEntry), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SpecializationMapEntry is not nothrow_move_constructible!"); struct SpecializationInfo { using NativeType = VkSpecializationInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry *pMapEntries_ = {}, size_t dataSize_ = {}, const void *pData_ = {}) VULKAN_HPP_NOEXCEPT : mapEntryCount(mapEntryCount_), pMapEntries(pMapEntries_), dataSize(dataSize_), pData(pData_) { } VULKAN_HPP_CONSTEXPR SpecializationInfo(SpecializationInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SpecializationInfo(VkSpecializationInfo const &rhs) VULKAN_HPP_NOEXCEPT : SpecializationInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template SpecializationInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_ = {}) : mapEntryCount(static_cast(mapEntries_.size())) , pMapEntries(mapEntries_.data()) , dataSize(data_.size() * sizeof(T)) , pData(data_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SpecializationInfo &operator=(SpecializationInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SpecializationInfo &operator=(VkSpecializationInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SpecializationInfo &setMapEntryCount(uint32_t mapEntryCount_) VULKAN_HPP_NOEXCEPT { mapEntryCount = mapEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo &setPMapEntries(const VULKAN_HPP_NAMESPACE::SpecializationMapEntry *pMapEntries_) VULKAN_HPP_NOEXCEPT { pMapEntries = pMapEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SpecializationInfo & setMapEntries(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &mapEntries_) VULKAN_HPP_NOEXCEPT { mapEntryCount = static_cast(mapEntries_.size()); pMapEntries = mapEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SpecializationInfo &setDataSize(size_t dataSize_) VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo &setPData(const void *pData_) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template SpecializationInfo &setData(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof(T); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(mapEntryCount, pMapEntries, dataSize, pData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SpecializationInfo const &) const = default; #else bool operator==(SpecializationInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (mapEntryCount == rhs.mapEntryCount) && (pMapEntries == rhs.pMapEntries) && (dataSize == rhs.dataSize) && (pData == rhs.pData); # endif } bool operator!=(SpecializationInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t mapEntryCount = {}; const VULKAN_HPP_NAMESPACE::SpecializationMapEntry *pMapEntries = {}; size_t dataSize = {}; const void *pData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SpecializationInfo) == sizeof(VkSpecializationInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SpecializationInfo is not nothrow_move_constructible!"); struct PipelineShaderStageCreateInfo { using NativeType = VkPipelineShaderStageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = 
VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char *pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo *pSpecializationInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), stage(stage_), module(module_), pName(pName_), pSpecializationInfo(pSpecializationInfo_) { } VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(PipelineShaderStageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageCreateInfo(VkPipelineShaderStageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineShaderStageCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineShaderStageCreateInfo &operator=(PipelineShaderStageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageCreateInfo &operator=(VkPipelineShaderStageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &setStage(VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_) VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &setModule(VULKAN_HPP_NAMESPACE::ShaderModule module_) VULKAN_HPP_NOEXCEPT { module = module_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &setPName(const char *pName_) VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo(const VULKAN_HPP_NAMESPACE::SpecializationInfo *pSpecializationInfo_) VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, stage, module, pName, pSpecializationInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(PipelineShaderStageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = stage <=> rhs.stage; cmp != 0) return cmp; if(auto cmp = module <=> rhs.module; cmp != 0) return cmp; if(pName != rhs.pName) if(auto cmp = strcmp(pName, rhs.pName); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==(PipelineShaderStageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (stage == rhs.stage) && (module == rhs.module) && ((pName == rhs.pName) || (strcmp(pName, rhs.pName) == 0)) && (pSpecializationInfo == rhs.pSpecializationInfo); } bool operator!=(PipelineShaderStageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; VULKAN_HPP_NAMESPACE::ShaderModule module = {}; const char *pName = {}; const VULKAN_HPP_NAMESPACE::SpecializationInfo *pSpecializationInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo) == sizeof(VkPipelineShaderStageCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineShaderStageCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineShaderStageCreateInfo; }; struct ComputePipelineCreateInfo { using NativeType = VkComputePipelineCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), stage(stage_), layout(layout_), basePipelineHandle(basePipelineHandle_), basePipelineIndex(basePipelineIndex_) { } VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(ComputePipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ComputePipelineCreateInfo(VkComputePipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ComputePipelineCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ComputePipelineCreateInfo &operator=(ComputePipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ComputePipelineCreateInfo &operator=(VkComputePipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &setStage(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const &stage_) VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &setLayout(VULKAN_HPP_NAMESPACE::PipelineLayout layout_) 
VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &setBasePipelineHandle(VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &setBasePipelineIndex(int32_t basePipelineIndex_) VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ComputePipelineCreateInfo const &) const = default; #else bool operator==(ComputePipelineCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (stage == rhs.stage) && (layout == rhs.layout) && (basePipelineHandle == rhs.basePipelineHandle) && (basePipelineIndex == rhs.basePipelineIndex); # endif } bool operator!=(ComputePipelineCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo) == sizeof(VkComputePipelineCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ComputePipelineCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ComputePipelineCreateInfo; }; struct ConditionalRenderingBeginInfoEXT { using NativeType = VkConditionalRenderingBeginInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), buffer(buffer_), offset(offset_), flags(flags_) { } VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(ConditionalRenderingBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ConditionalRenderingBeginInfoEXT(VkConditionalRenderingBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ConditionalRenderingBeginInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ConditionalRenderingBeginInfoEXT &operator=(ConditionalRenderingBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ConditionalRenderingBeginInfoEXT 
&operator=(VkConditionalRenderingBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, buffer, offset, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ConditionalRenderingBeginInfoEXT const &) const = default; #else bool operator==(ConditionalRenderingBeginInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (buffer == rhs.buffer) && (offset == rhs.offset) && (flags == rhs.flags); # endif } bool operator!=(ConditionalRenderingBeginInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT) == sizeof(VkConditionalRenderingBeginInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ConditionalRenderingBeginInfoEXT; }; struct ConformanceVersion { using NativeType = VkConformanceVersion; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT : major(major_), minor(minor_), subminor(subminor_), patch(patch_) { } VULKAN_HPP_CONSTEXPR ConformanceVersion(ConformanceVersion const &rhs) VULKAN_HPP_NOEXCEPT = default; ConformanceVersion(VkConformanceVersion const &rhs) VULKAN_HPP_NOEXCEPT : ConformanceVersion(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ConformanceVersion &operator=(ConformanceVersion const &rhs) VULKAN_HPP_NOEXCEPT = default; ConformanceVersion &operator=(VkConformanceVersion const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 
ConformanceVersion &setMajor(uint8_t major_) VULKAN_HPP_NOEXCEPT { major = major_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion &setMinor(uint8_t minor_) VULKAN_HPP_NOEXCEPT { minor = minor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion &setSubminor(uint8_t subminor_) VULKAN_HPP_NOEXCEPT { subminor = subminor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion &setPatch(uint8_t patch_) VULKAN_HPP_NOEXCEPT { patch = patch_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(major, minor, subminor, patch); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ConformanceVersion const &) const = default; #else bool operator==(ConformanceVersion const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (major == rhs.major) && (minor == rhs.minor) && (subminor == rhs.subminor) && (patch == rhs.patch); # endif } bool operator!=(ConformanceVersion const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint8_t major = {}; uint8_t minor = {}; uint8_t subminor = {}; uint8_t patch = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ConformanceVersion) == sizeof(VkConformanceVersion), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ConformanceVersion is not nothrow_move_constructible!"); using ConformanceVersionKHR = ConformanceVersion; struct CooperativeMatrixPropertiesNV { using NativeType = VkCooperativeMatrixPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), MSize(MSize_), NSize(NSize_), KSize(KSize_), AType(AType_), BType(BType_), CType(CType_), DType(DType_), scope(scope_) { } VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(CooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixPropertiesNV(VkCooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : CooperativeMatrixPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CooperativeMatrixPropertiesNV &operator=(CooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixPropertiesNV &operator=(VkCooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { 
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT { MSize = MSize_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT { NSize = NSize_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT { KSize = KSize_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT { AType = AType_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT { BType = BType_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT { CType = CType_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT { DType = DType_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT { scope = scope_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV *>( this );
    }

    explicit operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::ComponentTypeNV const &,
               VULKAN_HPP_NAMESPACE::ComponentTypeNV const &,
               VULKAN_HPP_NAMESPACE::ComponentTypeNV const &,
               VULKAN_HPP_NAMESPACE::ComponentTypeNV const &,
               VULKAN_HPP_NAMESPACE::ScopeNV const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
#else
    bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) &&
             ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope );
#  endif
    }

    bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType = StructureType::eCooperativeMatrixPropertiesNV;
    void *                                pNext = {};
    uint32_t                              MSize = {};
    uint32_t                              NSize = {};
    uint32_t                              KSize = {};
    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ScopeNV         scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
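  // Illustrative usage sketch (not part of the generated interface): inspecting the matrix
  // shapes reported for VK_NV_cooperative_matrix. The physicalDevice handle and the
  // enhanced-mode query call are assumptions (the extension must be available), and "vk"
  // is assumed to be the default VULKAN_HPP_NAMESPACE.
  //
  //   std::vector<vk::CooperativeMatrixPropertiesNV> matrixProperties =
  //     physicalDevice.getCooperativeMatrixPropertiesNV();
  //   for ( vk::CooperativeMatrixPropertiesNV const & p : matrixProperties )
  //   {
  //     if ( ( p.AType == vk::ComponentTypeNV::eFloat16 ) && ( p.scope == vk::ScopeNV::eSubgroup ) )
  //     {
  //       // p.MSize x p.NSize x p.KSize is a usable multiply size for fp16 A/B operands
  //     }
  //   }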
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = CooperativeMatrixPropertiesNV; }; struct CopyAccelerationStructureInfoKHR { using NativeType = VkCopyAccelerationStructureInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), src(src_), dst(dst_), mode(mode_) { } VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(CopyAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyAccelerationStructureInfoKHR(VkCopyAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : CopyAccelerationStructureInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyAccelerationStructureInfoKHR &operator=(CopyAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyAccelerationStructureInfoKHR &operator=(VkCopyAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &setSrc(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_) VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &setDst(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_) VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &setMode(VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, src, dst, mode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CopyAccelerationStructureInfoKHR const &) const = default; #else bool operator==(CopyAccelerationStructureInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (src == rhs.src) && (dst == rhs.dst) && (mode == rhs.mode); # endif } bool operator!=(CopyAccelerationStructureInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR) == sizeof(VkCopyAccelerationStructureInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyAccelerationStructureInfoKHR; }; struct CopyAccelerationStructureToMemoryInfoKHR { using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), src(src_), dst(dst_), mode(mode_) { } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR(CopyAccelerationStructureToMemoryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyAccelerationStructureToMemoryInfoKHR(VkCopyAccelerationStructureToMemoryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : CopyAccelerationStructureToMemoryInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyAccelerationStructureToMemoryInfoKHR &operator=(CopyAccelerationStructureToMemoryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyAccelerationStructureToMemoryInfoKHR &operator=(VkCopyAccelerationStructureToMemoryInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR &setSrc(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_) VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR &setDst(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const &dst_) VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setMode(VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, src, dst, mode); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR) == sizeof(VkCopyAccelerationStructureToMemoryInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyAccelerationStructureToMemoryInfoKHR; }; struct CopyBufferInfo2 { using NativeType = VkCopyBufferInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyBufferInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2 *pRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcBuffer(srcBuffer_), dstBuffer(dstBuffer_), regionCount(regionCount_), pRegions(pRegions_) { } VULKAN_HPP_CONSTEXPR CopyBufferInfo2(CopyBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyBufferInfo2(VkCopyBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyBufferInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_, const void *pNext_ = nullptr) : pNext(pNext_) , srcBuffer(srcBuffer_) , dstBuffer(dstBuffer_) , regionCount(static_cast(regions_.size())) , pRegions(regions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyBufferInfo2 &operator=(CopyBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyBufferInfo2 &operator=(VkCopyBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &setSrcBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_) VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &setDstBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer_) VULKAN_HPP_NOEXCEPT { dstBuffer = dstBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &setRegionCount(uint32_t regionCount_) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 &setPRegions(const VULKAN_HPP_NAMESPACE::BufferCopy2 *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyBufferInfo2 &setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_) VULKAN_HPP_NOEXCEPT { regionCount = static_cast(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= 
VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CopyBufferInfo2 const &) const = default; #else bool operator==(CopyBufferInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcBuffer == rhs.srcBuffer) && (dstBuffer == rhs.dstBuffer) && (regionCount == rhs.regionCount) && (pRegions == rhs.pRegions); # endif } bool operator!=(CopyBufferInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; uint32_t regionCount = {}; const VULKAN_HPP_NAMESPACE::BufferCopy2 *pRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyBufferInfo2) == sizeof(VkCopyBufferInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyBufferInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyBufferInfo2; }; using CopyBufferInfo2KHR = CopyBufferInfo2; struct CopyBufferToImageInfo2 { using NativeType = VkCopyBufferToImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 *pRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcBuffer(srcBuffer_), dstImage(dstImage_), dstImageLayout(dstImageLayout_), regionCount(regionCount_), pRegions(pRegions_) { } VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2(CopyBufferToImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyBufferToImageInfo2(VkCopyBufferToImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : CopyBufferToImageInfo2(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyBufferToImageInfo2(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_, const void *pNext_ = nullptr) : pNext(pNext_) , srcBuffer(srcBuffer_) , dstImage(dstImage_) , dstImageLayout(dstImageLayout_) , regionCount(static_cast(regions_.size())) , pRegions(regions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyBufferToImageInfo2 &operator=(CopyBufferToImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyBufferToImageInfo2 &operator=(VkCopyBufferToImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
CopyBufferToImageInfo2 &setSrcBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_) VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &setDstImage(VULKAN_HPP_NAMESPACE::Image dstImage_) VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &setDstImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_) VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &setRegionCount(uint32_t regionCount_) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 &setPRegions(const VULKAN_HPP_NAMESPACE::BufferImageCopy2 *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyBufferToImageInfo2 & setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_) VULKAN_HPP_NOEXCEPT { regionCount = static_cast(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CopyBufferToImageInfo2 const &) const = default; #else bool operator==(CopyBufferToImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcBuffer == rhs.srcBuffer) && (dstImage == rhs.dstImage) && (dstImageLayout == rhs.dstImageLayout) && (regionCount == rhs.regionCount) && (pRegions == rhs.pRegions); # endif } bool operator!=(CopyBufferToImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; VULKAN_HPP_NAMESPACE::Image dstImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; uint32_t regionCount = {}; const VULKAN_HPP_NAMESPACE::BufferImageCopy2 *pRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2) == sizeof(VkCopyBufferToImageInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyBufferToImageInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyBufferToImageInfo2; }; using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; struct CopyCommandTransformInfoQCOM { using NativeType = VkCopyCommandTransformInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ 
= VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), transform(transform_) { } VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(CopyCommandTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyCommandTransformInfoQCOM(VkCopyCommandTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT : CopyCommandTransformInfoQCOM(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyCommandTransformInfoQCOM &operator=(CopyCommandTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyCommandTransformInfoQCOM &operator=(VkCopyCommandTransformInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM &setTransform(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, transform); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CopyCommandTransformInfoQCOM const &) const = default; #else bool operator==(CopyCommandTransformInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (transform == rhs.transform); # endif } bool operator!=(CopyCommandTransformInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM) == sizeof(VkCopyCommandTransformInfoQCOM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyCommandTransformInfoQCOM; }; struct CopyDescriptorSet { using NativeType = VkCopyDescriptorSet; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcSet(srcSet_), srcBinding(srcBinding_), srcArrayElement(srcArrayElement_), dstSet(dstSet_), dstBinding(dstBinding_), 
      dstArrayElement( dstArrayElement_ ),
      descriptorCount( descriptorCount_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT { srcSet = srcSet_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT { srcBinding = srcBinding_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT { srcArrayElement = srcArrayElement_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT { dstSet = dstSet_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkCopyDescriptorSet *>( this ); }
    explicit operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkCopyDescriptorSet *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DescriptorSet const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::DescriptorSet const &,
               uint32_t const &,
               uint32_t const &,
               uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyDescriptorSet const & ) const = default;
#else
    bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) &&
             ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
             ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount );
#  endif
    }

    bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eCopyDescriptorSet;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet          = {};
    uint32_t                            srcBinding      = {};
    uint32_t                            srcArrayElement = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet          = {};
    uint32_t                            dstBinding      = {};
    uint32_t                            dstArrayElement = {};
    uint32_t                            descriptorCount = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ),
                            "struct and wrapper have different size!" );
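  // Usage sketch (illustrative only, not generated from the registry): copying one descriptor
  // binding between two descriptor sets with the struct defined above. The handles `device`,
  // `srcSet` and `dstSet` are assumed to come from the surrounding application code, and the
  // enhanced-mode ArrayProxy overload of Device::updateDescriptorSets is assumed to be available.
#if 0
  void exampleCopyOneDescriptor( VULKAN_HPP_NAMESPACE::Device        device,
                                 VULKAN_HPP_NAMESPACE::DescriptorSet srcSet,
                                 VULKAN_HPP_NAMESPACE::DescriptorSet dstSet )
  {
    VULKAN_HPP_NAMESPACE::CopyDescriptorSet copy;
    copy.setSrcSet( srcSet ).setSrcBinding( 0 ).setSrcArrayElement( 0 )
        .setDstSet( dstSet ).setDstBinding( 0 ).setDstArrayElement( 0 )
        .setDescriptorCount( 1 );
    device.updateDescriptorSets( {}, copy );  // no writes, one copy
  }
#endif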
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value,
                            "CopyDescriptorSet is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
  {
    using Type = CopyDescriptorSet;
  };

  struct ImageCopy2
  {
    using NativeType = VkImageCopy2;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                     VULKAN_HPP_NAMESPACE::Offset3D               srcOffset_      = {},
                                     VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                     VULKAN_HPP_NAMESPACE::Offset3D               dstOffset_      = {},
                                     VULKAN_HPP_NAMESPACE::Extent3D               extent_         = {},
                                     const void *                                 pNext_          = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , srcSubresource( srcSubresource_ )
      , srcOffset( srcOffset_ )
      , dstSubresource( dstSubresource_ )
      , dstOffset( dstOffset_ )
      , extent( extent_ )
    {
    }

    VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast<ImageCopy2 const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2 const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkImageCopy2 *>( this ); }
    explicit operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkImageCopy2 *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::Offset3D const &,
               VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &,
               VULKAN_HPP_NAMESPACE::Offset3D const &,
               VULKAN_HPP_NAMESPACE::Extent3D const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCopy2 const & ) const = default;
#else
    bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
#  endif
    }

    bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
#endif
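    // Data members mirror VkImageCopy2 one-to-one: a source and a destination subresource/offset
    // pair plus the copied extent; like the other "*2" copy structs it can be extended through pNext.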
public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageCopy2) == sizeof(VkImageCopy2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageCopy2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageCopy2; }; using ImageCopy2KHR = ImageCopy2; struct CopyImageInfo2 { using NativeType = VkCopyImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 *pRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcImage(srcImage_), srcImageLayout(srcImageLayout_), dstImage(dstImage_), dstImageLayout(dstImageLayout_), regionCount(regionCount_), pRegions(pRegions_) { } VULKAN_HPP_CONSTEXPR CopyImageInfo2(CopyImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyImageInfo2(VkCopyImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : CopyImageInfo2(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_, const void *pNext_ = nullptr) : pNext(pNext_) , srcImage(srcImage_) , srcImageLayout(srcImageLayout_) , dstImage(dstImage_) , dstImageLayout(dstImageLayout_) , regionCount(static_cast(regions_.size())) , pRegions(regions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyImageInfo2 &operator=(CopyImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyImageInfo2 &operator=(VkCopyImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setSrcImage(VULKAN_HPP_NAMESPACE::Image srcImage_) VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setSrcImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_) VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setDstImage(VULKAN_HPP_NAMESPACE::Image dstImage_) VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setDstImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_) VULKAN_HPP_NOEXCEPT { dstImageLayout = 
dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setRegionCount(uint32_t regionCount_) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 &setPRegions(const VULKAN_HPP_NAMESPACE::ImageCopy2 *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyImageInfo2 &setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_) VULKAN_HPP_NOEXCEPT { regionCount = static_cast(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CopyImageInfo2 const &) const = default; #else bool operator==(CopyImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcImage == rhs.srcImage) && (srcImageLayout == rhs.srcImageLayout) && (dstImage == rhs.dstImage) && (dstImageLayout == rhs.dstImageLayout) && (regionCount == rhs.regionCount) && (pRegions == rhs.pRegions); # endif } bool operator!=(CopyImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image srcImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::Image dstImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; uint32_t regionCount = {}; const VULKAN_HPP_NAMESPACE::ImageCopy2 *pRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyImageInfo2) == sizeof(VkCopyImageInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyImageInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyImageInfo2; }; using CopyImageInfo2KHR = CopyImageInfo2; struct CopyImageToBufferInfo2 { using NativeType = VkCopyImageToBufferInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 *pRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcImage(srcImage_), srcImageLayout(srcImageLayout_), dstBuffer(dstBuffer_), regionCount(regionCount_), pRegions(pRegions_) { } VULKAN_HPP_CONSTEXPR 
CopyImageToBufferInfo2(CopyImageToBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyImageToBufferInfo2(VkCopyImageToBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : CopyImageToBufferInfo2(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyImageToBufferInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_, const void *pNext_ = nullptr) : pNext(pNext_) , srcImage(srcImage_) , srcImageLayout(srcImageLayout_) , dstBuffer(dstBuffer_) , regionCount(static_cast(regions_.size())) , pRegions(regions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyImageToBufferInfo2 &operator=(CopyImageToBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyImageToBufferInfo2 &operator=(VkCopyImageToBufferInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &setSrcImage(VULKAN_HPP_NAMESPACE::Image srcImage_) VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &setSrcImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_) VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &setDstBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer_) VULKAN_HPP_NOEXCEPT { dstBuffer = dstBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &setRegionCount(uint32_t regionCount_) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 &setPRegions(const VULKAN_HPP_NAMESPACE::BufferImageCopy2 *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CopyImageToBufferInfo2 & setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_) VULKAN_HPP_NOEXCEPT { regionCount = static_cast(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CopyImageToBufferInfo2 const &) const = default; #else bool operator==(CopyImageToBufferInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcImage == rhs.srcImage) && (srcImageLayout == rhs.srcImageLayout) && (dstBuffer == rhs.dstBuffer) && (regionCount == rhs.regionCount) && (pRegions == rhs.pRegions); # endif } bool operator!=(CopyImageToBufferInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2; 
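    // regionCount / pRegions reference an application-owned array of BufferImageCopy2 entries that
    // must remain valid while this structure is in use; the enhanced-mode setRegions() fills both
    // fields from a single ArrayProxyNoTemporaries.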
const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image srcImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; uint32_t regionCount = {}; const VULKAN_HPP_NAMESPACE::BufferImageCopy2 *pRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2) == sizeof(VkCopyImageToBufferInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CopyImageToBufferInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = CopyImageToBufferInfo2; }; using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; struct CopyMemoryToAccelerationStructureInfoKHR { using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), src(src_), dst(dst_), mode(mode_) { } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(CopyMemoryToAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToAccelerationStructureInfoKHR(VkCopyMemoryToAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : CopyMemoryToAccelerationStructureInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CopyMemoryToAccelerationStructureInfoKHR &operator=(CopyMemoryToAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToAccelerationStructureInfoKHR &operator=(VkCopyMemoryToAccelerationStructureInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setSrc(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &src_) VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR &setDst(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_) VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setMode(VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, src, dst, mode); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
  StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR      src   = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         dst   = {};
    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode  = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
                            "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
  {
    using Type = CopyMemoryToAccelerationStructureInfoKHR;
  };

  struct CuFunctionCreateInfoNVX
  {
    using NativeType = VkCuFunctionCreateInfoNVX;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {},
                                                  const char *                      pName_  = {},
                                                  const void *                      pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , module( module_ )
      , pName( pName_ )
    {
    }

    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT { module = module_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( this ); }
    explicit operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkCuFunctionCreateInfoNVX *>( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CuModuleNVX const &, const char * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, module, pName );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = module <=> rhs.module; cmp != 0 )
        return cmp;
      if ( pName != rhs.pName )
        if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 )
          return ( cmp < 0 ) ?
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==(CuFunctionCreateInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (module == rhs.module) && ((pName == rhs.pName) || (strcmp(pName, rhs.pName) == 0)); } bool operator!=(CuFunctionCreateInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; const void *pNext = {}; VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; const char *pName = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX) == sizeof(VkCuFunctionCreateInfoNVX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CuFunctionCreateInfoNVX is not nothrow_move_constructible!"); template<> struct CppType { using Type = CuFunctionCreateInfoNVX; }; struct CuLaunchInfoNVX { using NativeType = VkCuLaunchInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void *const *pParams_ = {}, size_t extraCount_ = {}, const void *const *pExtras_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), function(function_), gridDimX(gridDimX_), gridDimY(gridDimY_), gridDimZ(gridDimZ_), blockDimX(blockDimX_), blockDimY(blockDimY_), blockDimZ(blockDimZ_), sharedMemBytes(sharedMemBytes_), paramCount(paramCount_), pParams(pParams_), extraCount(extraCount_), pExtras(pExtras_) { } VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX(CuLaunchInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; CuLaunchInfoNVX(VkCuLaunchInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CuLaunchInfoNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ¶ms_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &extras_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , function(function_) , gridDimX(gridDimX_) , gridDimY(gridDimY_) , gridDimZ(gridDimZ_) , blockDimX(blockDimX_) , blockDimY(blockDimY_) , blockDimZ(blockDimZ_) , sharedMemBytes(sharedMemBytes_) , paramCount(params_.size()) , pParams(params_.data()) , extraCount(extras_.size()) , pExtras(extras_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CuLaunchInfoNVX &operator=(CuLaunchInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; CuLaunchInfoNVX &operator=(VkCuLaunchInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX 
&setFunction(VULKAN_HPP_NAMESPACE::CuFunctionNVX function_) VULKAN_HPP_NOEXCEPT { function = function_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setGridDimX(uint32_t gridDimX_) VULKAN_HPP_NOEXCEPT { gridDimX = gridDimX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setGridDimY(uint32_t gridDimY_) VULKAN_HPP_NOEXCEPT { gridDimY = gridDimY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setGridDimZ(uint32_t gridDimZ_) VULKAN_HPP_NOEXCEPT { gridDimZ = gridDimZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setBlockDimX(uint32_t blockDimX_) VULKAN_HPP_NOEXCEPT { blockDimX = blockDimX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setBlockDimY(uint32_t blockDimY_) VULKAN_HPP_NOEXCEPT { blockDimY = blockDimY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setBlockDimZ(uint32_t blockDimZ_) VULKAN_HPP_NOEXCEPT { blockDimZ = blockDimZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setSharedMemBytes(uint32_t sharedMemBytes_) VULKAN_HPP_NOEXCEPT { sharedMemBytes = sharedMemBytes_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setParamCount(size_t paramCount_) VULKAN_HPP_NOEXCEPT { paramCount = paramCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setPParams(const void *const *pParams_) VULKAN_HPP_NOEXCEPT { pParams = pParams_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CuLaunchInfoNVX &setParams(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ¶ms_) VULKAN_HPP_NOEXCEPT { paramCount = params_.size(); pParams = params_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setExtraCount(size_t extraCount_) VULKAN_HPP_NOEXCEPT { extraCount = extraCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX &setPExtras(const void *const *pExtras_) VULKAN_HPP_NOEXCEPT { pExtras = pExtras_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) CuLaunchInfoNVX &setExtras(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &extras_) VULKAN_HPP_NOEXCEPT { extraCount = extras_.size(); pExtras = extras_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CuLaunchInfoNVX const &) const = default; #else bool operator==(CuLaunchInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (function == rhs.function) && (gridDimX == rhs.gridDimX) && (gridDimY == rhs.gridDimY) && (gridDimZ == rhs.gridDimZ) && (blockDimX == rhs.blockDimX) && (blockDimY == rhs.blockDimY) && (blockDimZ == rhs.blockDimZ) && (sharedMemBytes == rhs.sharedMemBytes) && (paramCount == rhs.paramCount) && (pParams == rhs.pParams) && (extraCount == rhs.extraCount) && (pExtras == rhs.pExtras); # endif } bool operator!=(CuLaunchInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); 
} #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; const void *pNext = {}; VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; uint32_t gridDimX = {}; uint32_t gridDimY = {}; uint32_t gridDimZ = {}; uint32_t blockDimX = {}; uint32_t blockDimY = {}; uint32_t blockDimZ = {}; uint32_t sharedMemBytes = {}; size_t paramCount = {}; const void *const *pParams = {}; size_t extraCount = {}; const void *const *pExtras = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX) == sizeof(VkCuLaunchInfoNVX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CuLaunchInfoNVX is not nothrow_move_constructible!"); template<> struct CppType { using Type = CuLaunchInfoNVX; }; struct CuModuleCreateInfoNVX { using NativeType = VkCuModuleCreateInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX(size_t dataSize_ = {}, const void *pData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dataSize(dataSize_), pData(pData_) { } VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX(CuModuleCreateInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; CuModuleCreateInfoNVX(VkCuModuleCreateInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT : CuModuleCreateInfoNVX(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template CuModuleCreateInfoNVX(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_, const void *pNext_ = nullptr) : pNext(pNext_) , dataSize(data_.size() * sizeof(T)) , pData(data_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ CuModuleCreateInfoNVX &operator=(CuModuleCreateInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; CuModuleCreateInfoNVX &operator=(VkCuModuleCreateInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX &setDataSize(size_t dataSize_) VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX &setPData(const void *pData_) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template CuModuleCreateInfoNVX &setData(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof(T); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dataSize, pData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(CuModuleCreateInfoNVX const &) const = default; #else bool operator==(CuModuleCreateInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { 
# if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dataSize == rhs.dataSize) && (pData == rhs.pData); # endif } bool operator!=(CuModuleCreateInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX; const void *pNext = {}; size_t dataSize = {}; const void *pData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX) == sizeof(VkCuModuleCreateInfoNVX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!"); template<> struct CppType { using Type = CuModuleCreateInfoNVX; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct D3D12FenceSubmitInfoKHR { using NativeType = VkD3D12FenceSubmitInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t *pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t *pSignalSemaphoreValues_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), waitSemaphoreValuesCount(waitSemaphoreValuesCount_), pWaitSemaphoreValues(pWaitSemaphoreValues_), signalSemaphoreValuesCount(signalSemaphoreValuesCount_), pSignalSemaphoreValues(pSignalSemaphoreValues_) { } VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(D3D12FenceSubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; D3D12FenceSubmitInfoKHR(VkD3D12FenceSubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : D3D12FenceSubmitInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) D3D12FenceSubmitInfoKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreValues_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , waitSemaphoreValuesCount(static_cast(waitSemaphoreValues_.size())) , pWaitSemaphoreValues(waitSemaphoreValues_.data()) , signalSemaphoreValuesCount(static_cast(signalSemaphoreValues_.size())) , pSignalSemaphoreValues(signalSemaphoreValues_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ D3D12FenceSubmitInfoKHR &operator=(D3D12FenceSubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; D3D12FenceSubmitInfoKHR &operator=(VkD3D12FenceSubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &setWaitSemaphoreValuesCount(uint32_t waitSemaphoreValuesCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreValuesCount = waitSemaphoreValuesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &setPWaitSemaphoreValues(const uint64_t *pWaitSemaphoreValues_) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) D3D12FenceSubmitInfoKHR & 
setWaitSemaphoreValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreValues_) VULKAN_HPP_NOEXCEPT { waitSemaphoreValuesCount = static_cast(waitSemaphoreValues_.size()); pWaitSemaphoreValues = waitSemaphoreValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &setSignalSemaphoreValuesCount(uint32_t signalSemaphoreValuesCount_) VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = signalSemaphoreValuesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &setPSignalSemaphoreValues(const uint64_t *pSignalSemaphoreValues_) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreValues_) VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = static_cast(signalSemaphoreValues_.size()); pSignalSemaphoreValues = signalSemaphoreValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(D3D12FenceSubmitInfoKHR const &) const = default; # else bool operator==(D3D12FenceSubmitInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount) && (pWaitSemaphoreValues == rhs.pWaitSemaphoreValues) && (signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount) && (pSignalSemaphoreValues == rhs.pSignalSemaphoreValues); # endif } bool operator!=(D3D12FenceSubmitInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; const void *pNext = {}; uint32_t waitSemaphoreValuesCount = {}; const uint64_t *pWaitSemaphoreValues = {}; uint32_t signalSemaphoreValuesCount = {}; const uint64_t *pSignalSemaphoreValues = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR) == sizeof(VkD3D12FenceSubmitInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = D3D12FenceSubmitInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct DebugMarkerMarkerInfoEXT { using NativeType = VkDebugMarkerMarkerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char *pMarkerName_ = {}, std::array const &color_ = {}, const void *pNext_ 
= nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pMarkerName(pMarkerName_), color(color_) { } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(DebugMarkerMarkerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugMarkerMarkerInfoEXT(VkDebugMarkerMarkerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugMarkerMarkerInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugMarkerMarkerInfoEXT &operator=(DebugMarkerMarkerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugMarkerMarkerInfoEXT &operator=(VkDebugMarkerMarkerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT &setPMarkerName(const char *pMarkerName_) VULKAN_HPP_NOEXCEPT { pMarkerName = pMarkerName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT &setColor(std::array color_) VULKAN_HPP_NOEXCEPT { color = color_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pMarkerName, color); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::partial_ordering operator<=>(DebugMarkerMarkerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(pMarkerName != rhs.pMarkerName) if(auto cmp = strcmp(pMarkerName, rhs.pMarkerName); cmp != 0) return (cmp < 0) ? 
std::partial_ordering::less : std::partial_ordering::greater; if(auto cmp = color <=> rhs.color; cmp != 0) return cmp; return std::partial_ordering::equivalent; } #endif bool operator==(DebugMarkerMarkerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && ((pMarkerName == rhs.pMarkerName) || (strcmp(pMarkerName, rhs.pMarkerName) == 0)) && (color == rhs.color); } bool operator!=(DebugMarkerMarkerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; const void *pNext = {}; const char *pMarkerName = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT) == sizeof(VkDebugMarkerMarkerInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugMarkerMarkerInfoEXT; }; struct DebugMarkerObjectNameInfoEXT { using NativeType = VkDebugMarkerObjectNameInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char *pObjectName_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), objectType(objectType_), object(object_), pObjectName(pObjectName_) { } VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(DebugMarkerObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectNameInfoEXT(VkDebugMarkerObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugMarkerObjectNameInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugMarkerObjectNameInfoEXT &operator=(DebugMarkerObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectNameInfoEXT &operator=(VkDebugMarkerObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &setObjectType(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &setObject(uint64_t object_) VULKAN_HPP_NOEXCEPT { object = object_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &setPObjectName(const char *pObjectName_) VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, objectType, 
object, pObjectName); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(DebugMarkerObjectNameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = objectType <=> rhs.objectType; cmp != 0) return cmp; if(auto cmp = object <=> rhs.object; cmp != 0) return cmp; if(pObjectName != rhs.pObjectName) if(auto cmp = strcmp(pObjectName, rhs.pObjectName); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==(DebugMarkerObjectNameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (objectType == rhs.objectType) && (object == rhs.object) && ((pObjectName == rhs.pObjectName) || (strcmp(pObjectName, rhs.pObjectName) == 0)); } bool operator!=(DebugMarkerObjectNameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; uint64_t object = {}; const char *pObjectName = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT) == sizeof(VkDebugMarkerObjectNameInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugMarkerObjectNameInfoEXT; }; struct DebugMarkerObjectTagInfoEXT { using NativeType = VkDebugMarkerObjectTagInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void *pTag_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), objectType(objectType_), object(object_), tagName(tagName_), tagSize(tagSize_), pTag(pTag_) { } VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(DebugMarkerObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectTagInfoEXT(VkDebugMarkerObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugMarkerObjectTagInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &tag_, const void *pNext_ = nullptr) : pNext(pNext_) , objectType(objectType_) , object(object_) , tagName(tagName_) , tagSize(tag_.size() * sizeof(T)) , pTag(tag_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugMarkerObjectTagInfoEXT &operator=(DebugMarkerObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectTagInfoEXT &operator=(VkDebugMarkerObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); 
return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &setObjectType(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &setObject(uint64_t object_) VULKAN_HPP_NOEXCEPT { object = object_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &setTagName(uint64_t tagName_) VULKAN_HPP_NOEXCEPT { tagName = tagName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &setTagSize(size_t tagSize_) VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &setPTag(const void *pTag_) VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template DebugMarkerObjectTagInfoEXT &setTag(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &tag_) VULKAN_HPP_NOEXCEPT { tagSize = tag_.size() * sizeof(T); pTag = tag_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, objectType, object, tagName, tagSize, pTag); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DebugMarkerObjectTagInfoEXT const &) const = default; #else bool operator==(DebugMarkerObjectTagInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (objectType == rhs.objectType) && (object == rhs.object) && (tagName == rhs.tagName) && (tagSize == rhs.tagSize) && (pTag == rhs.pTag); # endif } bool operator!=(DebugMarkerObjectTagInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; uint64_t object = {}; uint64_t tagName = {}; size_t tagSize = {}; const void *pTag = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT) == sizeof(VkDebugMarkerObjectTagInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugMarkerObjectTagInfoEXT; }; struct DebugReportCallbackCreateInfoEXT { using NativeType = VkDebugReportCallbackCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, 
PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void *pUserData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pfnCallback(pfnCallback_), pUserData(pUserData_) { } VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(DebugReportCallbackCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugReportCallbackCreateInfoEXT(VkDebugReportCallbackCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugReportCallbackCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugReportCallbackCreateInfoEXT &operator=(DebugReportCallbackCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugReportCallbackCreateInfoEXT &operator=(VkDebugReportCallbackCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &setPfnCallback(PFN_vkDebugReportCallbackEXT pfnCallback_) VULKAN_HPP_NOEXCEPT { pfnCallback = pfnCallback_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &setPUserData(void *pUserData_) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pfnCallback, pUserData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DebugReportCallbackCreateInfoEXT const &) const = default; #else bool operator==(DebugReportCallbackCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pfnCallback == rhs.pfnCallback) && (pUserData == rhs.pUserData); # endif } bool operator!=(DebugReportCallbackCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; PFN_vkDebugReportCallbackEXT pfnCallback = {}; void *pUserData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT) == sizeof(VkDebugReportCallbackCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugReportCallbackCreateInfoEXT; }; struct DebugUtilsLabelEXT { using NativeType = VkDebugUtilsLabelEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char *pLabelName_ = {}, std::array const &color_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pLabelName(pLabelName_), color(color_) { } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(DebugUtilsLabelEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsLabelEXT(VkDebugUtilsLabelEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugUtilsLabelEXT &operator=(DebugUtilsLabelEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsLabelEXT &operator=(VkDebugUtilsLabelEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT &setPLabelName(const char *pLabelName_) VULKAN_HPP_NOEXCEPT { pLabelName = pLabelName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT &setColor(std::array color_) VULKAN_HPP_NOEXCEPT { color = color_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pLabelName, color); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::partial_ordering operator<=>(DebugUtilsLabelEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(pLabelName != rhs.pLabelName) if(auto cmp = strcmp(pLabelName, rhs.pLabelName); cmp != 0) return (cmp < 0) ? 
std::partial_ordering::less : std::partial_ordering::greater; if(auto cmp = color <=> rhs.color; cmp != 0) return cmp; return std::partial_ordering::equivalent; } #endif bool operator==(DebugUtilsLabelEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && ((pLabelName == rhs.pLabelName) || (strcmp(pLabelName, rhs.pLabelName) == 0)) && (color == rhs.color); } bool operator!=(DebugUtilsLabelEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; const void *pNext = {}; const char *pLabelName = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT) == sizeof(VkDebugUtilsLabelEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugUtilsLabelEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugUtilsLabelEXT; }; struct DebugUtilsObjectNameInfoEXT { using NativeType = VkDebugUtilsObjectNameInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char *pObjectName_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), objectType(objectType_), objectHandle(objectHandle_), pObjectName(pObjectName_) { } VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(DebugUtilsObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectNameInfoEXT(VkDebugUtilsObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugUtilsObjectNameInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugUtilsObjectNameInfoEXT &operator=(DebugUtilsObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectNameInfoEXT &operator=(VkDebugUtilsObjectNameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &setObjectType(VULKAN_HPP_NAMESPACE::ObjectType objectType_) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &setObjectHandle(uint64_t objectHandle_) VULKAN_HPP_NOEXCEPT { objectHandle = objectHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &setPObjectName(const char *pObjectName_) VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, objectType, objectHandle, pObjectName); } #endif #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(DebugUtilsObjectNameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = objectType <=> rhs.objectType; cmp != 0) return cmp; if(auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0) return cmp; if(pObjectName != rhs.pObjectName) if(auto cmp = strcmp(pObjectName, rhs.pObjectName); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==(DebugUtilsObjectNameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (objectType == rhs.objectType) && (objectHandle == rhs.objectHandle) && ((pObjectName == rhs.pObjectName) || (strcmp(pObjectName, rhs.pObjectName) == 0)); } bool operator!=(DebugUtilsObjectNameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; uint64_t objectHandle = {}; const char *pObjectName = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT) == sizeof(VkDebugUtilsObjectNameInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugUtilsObjectNameInfoEXT; }; struct DebugUtilsMessengerCallbackDataEXT { using NativeType = VkDebugUtilsMessengerCallbackDataEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char *pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char *pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *pObjects_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pMessageIdName(pMessageIdName_), messageIdNumber(messageIdNumber_), pMessage(pMessage_), queueLabelCount(queueLabelCount_), pQueueLabels(pQueueLabels_), cmdBufLabelCount(cmdBufLabelCount_), pCmdBufLabels(pCmdBufLabels_), objectCount(objectCount_), pObjects(pObjects_) { } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(DebugUtilsMessengerCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCallbackDataEXT(VkDebugUtilsMessengerCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugUtilsMessengerCallbackDataEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char *pMessageIdName_, int32_t messageIdNumber_, const char *pMessage_, 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &objects_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , pMessageIdName(pMessageIdName_) , messageIdNumber(messageIdNumber_) , pMessage(pMessage_) , queueLabelCount(static_cast(queueLabels_.size())) , pQueueLabels(queueLabels_.data()) , cmdBufLabelCount(static_cast(cmdBufLabels_.size())) , pCmdBufLabels(cmdBufLabels_.data()) , objectCount(static_cast(objects_.size())) , pObjects(objects_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugUtilsMessengerCallbackDataEXT &operator=(DebugUtilsMessengerCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCallbackDataEXT &operator=(VkDebugUtilsMessengerCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setFlags(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setPMessageIdName(const char *pMessageIdName_) VULKAN_HPP_NOEXCEPT { pMessageIdName = pMessageIdName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setMessageIdNumber(int32_t messageIdNumber_) VULKAN_HPP_NOEXCEPT { messageIdNumber = messageIdNumber_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setPMessage(const char *pMessage_) VULKAN_HPP_NOEXCEPT { pMessage = pMessage_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setQueueLabelCount(uint32_t queueLabelCount_) VULKAN_HPP_NOEXCEPT { queueLabelCount = queueLabelCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPQueueLabels(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pQueueLabels_) VULKAN_HPP_NOEXCEPT { pQueueLabels = pQueueLabels_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DebugUtilsMessengerCallbackDataEXT & setQueueLabels(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueLabels_) VULKAN_HPP_NOEXCEPT { queueLabelCount = static_cast(queueLabels_.size()); pQueueLabels = queueLabels_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setCmdBufLabelCount(uint32_t cmdBufLabelCount_) VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = cmdBufLabelCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pCmdBufLabels_) VULKAN_HPP_NOEXCEPT { pCmdBufLabels = pCmdBufLabels_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &cmdBufLabels_) VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = static_cast(cmdBufLabels_.size()); pCmdBufLabels = cmdBufLabels_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &setObjectCount(uint32_t objectCount_) VULKAN_HPP_NOEXCEPT { objectCount = objectCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 
DebugUtilsMessengerCallbackDataEXT & setPObjects(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *pObjects_) VULKAN_HPP_NOEXCEPT { pObjects = pObjects_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DebugUtilsMessengerCallbackDataEXT & setObjects(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &objects_) VULKAN_HPP_NOEXCEPT { objectCount = static_cast(objects_.size()); pObjects = objects_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(DebugUtilsMessengerCallbackDataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(pMessageIdName != rhs.pMessageIdName) if(auto cmp = strcmp(pMessageIdName, rhs.pMessageIdName); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0) return cmp; if(pMessage != rhs.pMessage) if(auto cmp = strcmp(pMessage, rhs.pMessage); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0) return cmp; if(auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0) return cmp; if(auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0) return cmp; if(auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0) return cmp; if(auto cmp = objectCount <=> rhs.objectCount; cmp != 0) return cmp; if(auto cmp = pObjects <=> rhs.pObjects; cmp != 0) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==(DebugUtilsMessengerCallbackDataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && ((pMessageIdName == rhs.pMessageIdName) || (strcmp(pMessageIdName, rhs.pMessageIdName) == 0)) && (messageIdNumber == rhs.messageIdNumber) && ((pMessage == rhs.pMessage) || (strcmp(pMessage, rhs.pMessage) == 0)) && (queueLabelCount == rhs.queueLabelCount) && (pQueueLabels == rhs.pQueueLabels) && (cmdBufLabelCount == rhs.cmdBufLabelCount) && (pCmdBufLabels == rhs.pCmdBufLabels) && (objectCount == rhs.objectCount) && (pObjects == rhs.pObjects); } bool operator!=(DebugUtilsMessengerCallbackDataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; const char *pMessageIdName = {}; int32_t messageIdNumber = {}; const char *pMessage = {}; uint32_t queueLabelCount = {}; const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pQueueLabels = {}; uint32_t cmdBufLabelCount = {}; const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pCmdBufLabels = {}; uint32_t objectCount = {}; const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *pObjects = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT) == sizeof(VkDebugUtilsMessengerCallbackDataEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugUtilsMessengerCallbackDataEXT; }; struct DebugUtilsMessengerCreateInfoEXT { using NativeType = VkDebugUtilsMessengerCreateInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void *pUserData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), messageSeverity(messageSeverity_), messageType(messageType_), pfnUserCallback(pfnUserCallback_), pUserData(pUserData_) { } VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(DebugUtilsMessengerCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCreateInfoEXT(VkDebugUtilsMessengerCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugUtilsMessengerCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif 
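  // Illustrative usage sketch (not generated code): wiring DebugUtilsMessengerCreateInfoEXT to a callback
  // that receives the native VkDebugUtilsMessengerCallbackDataEXT defined above. Assumes VK_EXT_debug_utils
  // is enabled on 'instance' and the dispatcher knows vkCreateDebugUtilsMessengerEXT.
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
  //                                                 VkDebugUtilsMessageTypeFlagsEXT,
  //                                                 const VkDebugUtilsMessengerCallbackDataEXT * pData,
  //                                                 void * )
  //   {
  //     std::fprintf( stderr, "%s\n", pData->pMessage );
  //     return VK_FALSE;  // never abort the call that triggered the message
  //   }
  //
  //   auto createInfo = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT()
  //                       .setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
  //                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eError )
  //                       .setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
  //                       .setPfnUserCallback( &debugCallback );
  //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( createInfo );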
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugUtilsMessengerCreateInfoEXT &operator=(DebugUtilsMessengerCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCreateInfoEXT &operator=(VkDebugUtilsMessengerCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageSeverity(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_) VULKAN_HPP_NOEXCEPT { messageSeverity = messageSeverity_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageType(VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_) VULKAN_HPP_NOEXCEPT { messageType = messageType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &setPfnUserCallback(PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_) VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &setPUserData(void *pUserData_) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DebugUtilsMessengerCreateInfoEXT const &) const = default; #else bool operator==(DebugUtilsMessengerCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (messageSeverity == rhs.messageSeverity) && (messageType == rhs.messageType) && (pfnUserCallback == rhs.pfnUserCallback) && (pUserData == rhs.pUserData); # endif } bool operator!=(DebugUtilsMessengerCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; void *pUserData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT) == sizeof(VkDebugUtilsMessengerCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type 
= DebugUtilsMessengerCreateInfoEXT; }; struct DebugUtilsObjectTagInfoEXT { using NativeType = VkDebugUtilsObjectTagInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void *pTag_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), objectType(objectType_), objectHandle(objectHandle_), tagName(tagName_), tagSize(tagSize_), pTag(pTag_) { } VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(DebugUtilsObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectTagInfoEXT(VkDebugUtilsObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DebugUtilsObjectTagInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &tag_, const void *pNext_ = nullptr) : pNext(pNext_) , objectType(objectType_) , objectHandle(objectHandle_) , tagName(tagName_) , tagSize(tag_.size() * sizeof(T)) , pTag(tag_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DebugUtilsObjectTagInfoEXT &operator=(DebugUtilsObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectTagInfoEXT &operator=(VkDebugUtilsObjectTagInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &setObjectType(VULKAN_HPP_NAMESPACE::ObjectType objectType_) VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &setObjectHandle(uint64_t objectHandle_) VULKAN_HPP_NOEXCEPT { objectHandle = objectHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &setTagName(uint64_t tagName_) VULKAN_HPP_NOEXCEPT { tagName = tagName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &setTagSize(size_t tagSize_) VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &setPTag(const void *pTag_) VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template DebugUtilsObjectTagInfoEXT &setTag(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &tag_) VULKAN_HPP_NOEXCEPT { tagSize = tag_.size() * sizeof(T); pTag = tag_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, objectType, objectHandle, tagName, tagSize, pTag); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(DebugUtilsObjectTagInfoEXT const &) const = default; #else bool operator==(DebugUtilsObjectTagInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (objectType == rhs.objectType) && (objectHandle == rhs.objectHandle) && (tagName == rhs.tagName) && (tagSize == rhs.tagSize) && (pTag == rhs.pTag); # endif } bool operator!=(DebugUtilsObjectTagInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; uint64_t objectHandle = {}; uint64_t tagName = {}; size_t tagSize = {}; const void *pTag = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT) == sizeof(VkDebugUtilsObjectTagInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DebugUtilsObjectTagInfoEXT; }; struct DedicatedAllocationBufferCreateInfoNV { using NativeType = VkDedicatedAllocationBufferCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dedicatedAllocation(dedicatedAllocation_) { } VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(DedicatedAllocationBufferCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationBufferCreateInfoNV(VkDedicatedAllocationBufferCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : DedicatedAllocationBufferCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DedicatedAllocationBufferCreateInfoNV &operator=(DedicatedAllocationBufferCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationBufferCreateInfoNV &operator=(VkDedicatedAllocationBufferCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_) VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dedicatedAllocation); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
    operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
#else
    bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
#  endif
    }

    bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDedicatedAllocationBufferCreateInfoNV;
    const void *                        pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        dedicatedAllocation = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
                            "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
  {
    using Type = DedicatedAllocationBufferCreateInfoNV;
  };

  struct DedicatedAllocationImageCreateInfoNV
  {
    using NativeType = VkDedicatedAllocationImageCreateInfoNV;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {},
                                                               const void *                 pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , dedicatedAllocation( dedicatedAllocation_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocation = dedicatedAllocation_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

    explicit operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, dedicatedAllocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
#else
    bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dedicatedAllocation == rhs.dedicatedAllocation); # endif } bool operator!=(DedicatedAllocationImageCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV) == sizeof(VkDedicatedAllocationImageCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = DedicatedAllocationImageCreateInfoNV; }; struct DedicatedAllocationMemoryAllocateInfoNV { using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), image(image_), buffer(buffer_) { } VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(DedicatedAllocationMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationMemoryAllocateInfoNV(VkDedicatedAllocationMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : DedicatedAllocationMemoryAllocateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DedicatedAllocationMemoryAllocateInfoNV &operator=(DedicatedAllocationMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationMemoryAllocateInfoNV &operator=(VkDedicatedAllocationMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, image, buffer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DedicatedAllocationMemoryAllocateInfoNV const &) const = default; #else bool operator==(DedicatedAllocationMemoryAllocateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if 
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (image == rhs.image) && (buffer == rhs.buffer); # endif } bool operator!=(DedicatedAllocationMemoryAllocateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV) == sizeof(VkDedicatedAllocationMemoryAllocateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = DedicatedAllocationMemoryAllocateInfoNV; }; struct MemoryBarrier2 { using NativeType = VkMemoryBarrier2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcStageMask(srcStageMask_), srcAccessMask(srcAccessMask_), dstStageMask(dstStageMask_), dstAccessMask(dstAccessMask_) { } VULKAN_HPP_CONSTEXPR MemoryBarrier2(MemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryBarrier2(VkMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT : MemoryBarrier2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryBarrier2 &operator=(MemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryBarrier2 &operator=(VkMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &setSrcStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_) VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &setDstStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_) VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcStageMask, 
srcAccessMask, dstStageMask, dstAccessMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryBarrier2 const &) const = default; #else bool operator==(MemoryBarrier2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcStageMask == rhs.srcStageMask) && (srcAccessMask == rhs.srcAccessMask) && (dstStageMask == rhs.dstStageMask) && (dstAccessMask == rhs.dstAccessMask); # endif } bool operator!=(MemoryBarrier2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryBarrier2) == sizeof(VkMemoryBarrier2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryBarrier2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryBarrier2; }; using MemoryBarrier2KHR = MemoryBarrier2; struct ImageSubresourceRange { using NativeType = VkImageSubresourceRange; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT : aspectMask(aspectMask_), baseMipLevel(baseMipLevel_), levelCount(levelCount_), baseArrayLayer(baseArrayLayer_), layerCount(layerCount_) { } VULKAN_HPP_CONSTEXPR ImageSubresourceRange(ImageSubresourceRange const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSubresourceRange(VkImageSubresourceRange const &rhs) VULKAN_HPP_NOEXCEPT : ImageSubresourceRange(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSubresourceRange &operator=(ImageSubresourceRange const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSubresourceRange &operator=(VkImageSubresourceRange const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &setAspectMask(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &setBaseMipLevel(uint32_t baseMipLevel_) VULKAN_HPP_NOEXCEPT { baseMipLevel = baseMipLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &setLevelCount(uint32_t levelCount_) VULKAN_HPP_NOEXCEPT { levelCount = levelCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &setBaseArrayLayer(uint32_t baseArrayLayer_) VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &setLayerCount(uint32_t layerCount_) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageSubresourceRange const &) const = default; #else bool operator==(ImageSubresourceRange const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (aspectMask == rhs.aspectMask) && (baseMipLevel == rhs.baseMipLevel) && (levelCount == rhs.levelCount) && (baseArrayLayer == rhs.baseArrayLayer) && (layerCount == rhs.layerCount); # endif } bool operator!=(ImageSubresourceRange const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; uint32_t baseMipLevel = {}; uint32_t levelCount = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageSubresourceRange) == sizeof(VkImageSubresourceRange), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageSubresourceRange is not nothrow_move_constructible!"); struct ImageMemoryBarrier2 { using NativeType = VkImageMemoryBarrier2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcStageMask(srcStageMask_), srcAccessMask(srcAccessMask_), dstStageMask(dstStageMask_), dstAccessMask(dstAccessMask_), oldLayout(oldLayout_), newLayout(newLayout_), srcQueueFamilyIndex(srcQueueFamilyIndex_), dstQueueFamilyIndex(dstQueueFamilyIndex_), image(image_), subresourceRange(subresourceRange_) { } VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2(ImageMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageMemoryBarrier2(VkImageMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageMemoryBarrier2 &operator=(ImageMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageMemoryBarrier2 &operator=(VkImageMemoryBarrier2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setSrcStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_) VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setDstStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_) VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setOldLayout(VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_) VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setNewLayout(VULKAN_HPP_NAMESPACE::ImageLayout newLayout_) VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setSrcQueueFamilyIndex(uint32_t srcQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setDstQueueFamilyIndex(uint32_t dstQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSubresourceRange(VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &subresourceRange_) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageMemoryBarrier2 const &) const = default; #else bool operator==(ImageMemoryBarrier2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcStageMask == rhs.srcStageMask) && (srcAccessMask == rhs.srcAccessMask) && (dstStageMask == rhs.dstStageMask) && (dstAccessMask == rhs.dstAccessMask) && (oldLayout == rhs.oldLayout) && (newLayout == rhs.newLayout) && (srcQueueFamilyIndex == rhs.srcQueueFamilyIndex) && (dstQueueFamilyIndex == rhs.dstQueueFamilyIndex) && (image == rhs.image) && (subresourceRange == rhs.subresourceRange); # endif } bool operator!=(ImageMemoryBarrier2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout newLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2) == sizeof(VkImageMemoryBarrier2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageMemoryBarrier2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageMemoryBarrier2; }; using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; struct DependencyInfo { using NativeType = VkDependencyInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DependencyInfo(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, uint32_t memoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 *pMemoryBarriers_ = {}, uint32_t bufferMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 *pBufferMemoryBarriers_ = {}, uint32_t imageMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 *pImageMemoryBarriers_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dependencyFlags(dependencyFlags_), memoryBarrierCount(memoryBarrierCount_), pMemoryBarriers(pMemoryBarriers_), bufferMemoryBarrierCount(bufferMemoryBarrierCount_), pBufferMemoryBarriers(pBufferMemoryBarriers_), imageMemoryBarrierCount(imageMemoryBarrierCount_), pImageMemoryBarriers(pImageMemoryBarriers_) { } VULKAN_HPP_CONSTEXPR DependencyInfo(DependencyInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DependencyInfo(VkDependencyInfo const &rhs) VULKAN_HPP_NOEXCEPT : DependencyInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DependencyInfo(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &memoryBarriers_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bufferMemoryBarriers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageMemoryBarriers_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , dependencyFlags(dependencyFlags_) , memoryBarrierCount(static_cast(memoryBarriers_.size())) , pMemoryBarriers(memoryBarriers_.data()) , bufferMemoryBarrierCount(static_cast(bufferMemoryBarriers_.size())) , pBufferMemoryBarriers(bufferMemoryBarriers_.data()) , imageMemoryBarrierCount(static_cast(imageMemoryBarriers_.size())) , pImageMemoryBarriers(imageMemoryBarriers_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DependencyInfo &operator=(DependencyInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DependencyInfo &operator=(VkDependencyInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DependencyInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo &setDependencyFlags(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_) VULKAN_HPP_NOEXCEPT { dependencyFlags = dependencyFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo &setMemoryBarrierCount(uint32_t memoryBarrierCount_) VULKAN_HPP_NOEXCEPT { 
memoryBarrierCount = memoryBarrierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo &setPMemoryBarriers(const VULKAN_HPP_NAMESPACE::MemoryBarrier2 *pMemoryBarriers_) VULKAN_HPP_NOEXCEPT { pMemoryBarriers = pMemoryBarriers_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DependencyInfo & setMemoryBarriers(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &memoryBarriers_) VULKAN_HPP_NOEXCEPT { memoryBarrierCount = static_cast(memoryBarriers_.size()); pMemoryBarriers = memoryBarriers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DependencyInfo &setBufferMemoryBarrierCount(uint32_t bufferMemoryBarrierCount_) VULKAN_HPP_NOEXCEPT { bufferMemoryBarrierCount = bufferMemoryBarrierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers(const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 *pBufferMemoryBarriers_) VULKAN_HPP_NOEXCEPT { pBufferMemoryBarriers = pBufferMemoryBarriers_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DependencyInfo &setBufferMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bufferMemoryBarriers_) VULKAN_HPP_NOEXCEPT { bufferMemoryBarrierCount = static_cast(bufferMemoryBarriers_.size()); pBufferMemoryBarriers = bufferMemoryBarriers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DependencyInfo &setImageMemoryBarrierCount(uint32_t imageMemoryBarrierCount_) VULKAN_HPP_NOEXCEPT { imageMemoryBarrierCount = imageMemoryBarrierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers(const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 *pImageMemoryBarriers_) VULKAN_HPP_NOEXCEPT { pImageMemoryBarriers = pImageMemoryBarriers_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DependencyInfo &setImageMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageMemoryBarriers_) VULKAN_HPP_NOEXCEPT { imageMemoryBarrierCount = static_cast(imageMemoryBarriers_.size()); pImageMemoryBarriers = imageMemoryBarriers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DependencyInfo const &) const = default; #else bool operator==(DependencyInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dependencyFlags == rhs.dependencyFlags) && (memoryBarrierCount == rhs.memoryBarrierCount) && (pMemoryBarriers == rhs.pMemoryBarriers) && (bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount) && (pBufferMemoryBarriers == rhs.pBufferMemoryBarriers) && (imageMemoryBarrierCount == rhs.imageMemoryBarrierCount) && (pImageMemoryBarriers == rhs.pImageMemoryBarriers); # endif } bool operator!=(DependencyInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: 
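  // Editorial usage sketch, not part of the generated header: an ImageMemoryBarrier2 is recorded
  // through a DependencyInfo with CommandBuffer::pipelineBarrier2 (core in Vulkan 1.3; use
  // pipelineBarrier2KHR with VK_KHR_synchronization2 otherwise). `cmd` (a vk::CommandBuffer in the
  // recording state) and `image` (a vk::Image) are assumed, as is the default `vk` namespace and
  // enhanced mode (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined) for the ArrayProxy setter.
  //
  //   vk::ImageMemoryBarrier2 barrier = vk::ImageMemoryBarrier2{}
  //                                       .setSrcStageMask(vk::PipelineStageFlagBits2::eTopOfPipe)
  //                                       .setDstStageMask(vk::PipelineStageFlagBits2::eTransfer)
  //                                       .setDstAccessMask(vk::AccessFlagBits2::eTransferWrite)
  //                                       .setOldLayout(vk::ImageLayout::eUndefined)
  //                                       .setNewLayout(vk::ImageLayout::eTransferDstOptimal)
  //                                       .setSrcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
  //                                       .setDstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
  //                                       .setImage(image)
  //                                       .setSubresourceRange(vk::ImageSubresourceRange(vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1));
  //   vk::DependencyInfo dependencyInfo = vk::DependencyInfo{}.setImageMemoryBarriers(barrier);
  //   cmd.pipelineBarrier2(dependencyInfo);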
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; uint32_t memoryBarrierCount = {}; const VULKAN_HPP_NAMESPACE::MemoryBarrier2 *pMemoryBarriers = {}; uint32_t bufferMemoryBarrierCount = {}; const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 *pBufferMemoryBarriers = {}; uint32_t imageMemoryBarrierCount = {}; const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 *pImageMemoryBarriers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DependencyInfo) == sizeof(VkDependencyInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DependencyInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DependencyInfo; }; using DependencyInfoKHR = DependencyInfo; struct DescriptorBufferInfo { using NativeType = VkDescriptorBufferInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT : buffer(buffer_), offset(offset_), range(range_) { } VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(DescriptorBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorBufferInfo(VkDescriptorBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorBufferInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorBufferInfo &operator=(DescriptorBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorBufferInfo &operator=(VkDescriptorBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &setRange(VULKAN_HPP_NAMESPACE::DeviceSize range_) VULKAN_HPP_NOEXCEPT { range = range_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(buffer, offset, range); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorBufferInfo const &) const = default; #else bool operator==(DescriptorBufferInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (buffer == rhs.buffer) && (offset == rhs.offset) && (range == rhs.range); # endif } bool operator!=(DescriptorBufferInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize range = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorBufferInfo) == sizeof(VkDescriptorBufferInfo), "struct and wrapper have 
different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorBufferInfo is not nothrow_move_constructible!"); struct DescriptorImageInfo { using NativeType = VkDescriptorImageInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT : sampler(sampler_), imageView(imageView_), imageLayout(imageLayout_) { } VULKAN_HPP_CONSTEXPR DescriptorImageInfo(DescriptorImageInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorImageInfo(VkDescriptorImageInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorImageInfo &operator=(DescriptorImageInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorImageInfo &operator=(VkDescriptorImageInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &setSampler(VULKAN_HPP_NAMESPACE::Sampler sampler_) VULKAN_HPP_NOEXCEPT { sampler = sampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &setImageView(VULKAN_HPP_NAMESPACE::ImageView imageView_) VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &setImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_) VULKAN_HPP_NOEXCEPT { imageLayout = imageLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sampler, imageView, imageLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorImageInfo const &) const = default; #else bool operator==(DescriptorImageInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sampler == rhs.sampler) && (imageView == rhs.imageView) && (imageLayout == rhs.imageLayout); # endif } bool operator!=(DescriptorImageInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Sampler sampler = {}; VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorImageInfo) == sizeof(VkDescriptorImageInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorImageInfo is not nothrow_move_constructible!"); struct DescriptorPoolSize { using NativeType = VkDescriptorPoolSize; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) 
VULKAN_HPP_NOEXCEPT : type(type_), descriptorCount(descriptorCount_) { } VULKAN_HPP_CONSTEXPR DescriptorPoolSize(DescriptorPoolSize const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolSize(VkDescriptorPoolSize const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorPoolSize &operator=(DescriptorPoolSize const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolSize &operator=(VkDescriptorPoolSize const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize &setType(VULKAN_HPP_NAMESPACE::DescriptorType type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize &setDescriptorCount(uint32_t descriptorCount_) VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(type, descriptorCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorPoolSize const &) const = default; #else bool operator==(DescriptorPoolSize const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (type == rhs.type) && (descriptorCount == rhs.descriptorCount); # endif } bool operator!=(DescriptorPoolSize const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; uint32_t descriptorCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorPoolSize) == sizeof(VkDescriptorPoolSize), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorPoolSize is not nothrow_move_constructible!"); struct DescriptorPoolCreateInfo { using NativeType = VkDescriptorPoolCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize *pPoolSizes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), maxSets(maxSets_), poolSizeCount(poolSizeCount_), pPoolSizes(pPoolSizes_) { } VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(DescriptorPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolCreateInfo(VkDescriptorPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorPoolCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &poolSizes_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , maxSets(maxSets_) 
, poolSizeCount(static_cast(poolSizes_.size())) , pPoolSizes(poolSizes_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorPoolCreateInfo &operator=(DescriptorPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolCreateInfo &operator=(VkDescriptorPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &setMaxSets(uint32_t maxSets_) VULKAN_HPP_NOEXCEPT { maxSets = maxSets_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &setPoolSizeCount(uint32_t poolSizeCount_) VULKAN_HPP_NOEXCEPT { poolSizeCount = poolSizeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &setPPoolSizes(const VULKAN_HPP_NAMESPACE::DescriptorPoolSize *pPoolSizes_) VULKAN_HPP_NOEXCEPT { pPoolSizes = pPoolSizes_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorPoolCreateInfo & setPoolSizes(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &poolSizes_) VULKAN_HPP_NOEXCEPT { poolSizeCount = static_cast(poolSizes_.size()); pPoolSizes = poolSizes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorPoolCreateInfo const &) const = default; #else bool operator==(DescriptorPoolCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (maxSets == rhs.maxSets) && (poolSizeCount == rhs.poolSizeCount) && (pPoolSizes == rhs.pPoolSizes); # endif } bool operator!=(DescriptorPoolCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; uint32_t maxSets = {}; uint32_t poolSizeCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorPoolSize *pPoolSizes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo) == sizeof(VkDescriptorPoolCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorPoolCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorPoolCreateInfo; }; struct DescriptorPoolInlineUniformBlockCreateInfo { using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo; static const bool 
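  // Editorial usage sketch, not part of the generated header: a descriptor pool is created from a
  // list of DescriptorPoolSize entries through the DescriptorPoolCreateInfo defined above. `device`
  // is an assumed vk::Device; the counts are illustrative, and enhanced mode is assumed for the
  // ArrayProxy constructor and the exception-based createDescriptorPool return.
  //
  //   std::array<vk::DescriptorPoolSize, 2> poolSizes = {
  //     vk::DescriptorPoolSize(vk::DescriptorType::eUniformBuffer, 16),
  //     vk::DescriptorPoolSize(vk::DescriptorType::eCombinedImageSampler, 16)
  //   };
  //   vk::DescriptorPoolCreateInfo poolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 8, poolSizes);
  //   vk::DescriptorPool descriptorPool = device.createDescriptorPool(poolCreateInfo);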
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo(uint32_t maxInlineUniformBlockBindings_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxInlineUniformBlockBindings(maxInlineUniformBlockBindings_) { } VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo(DescriptorPoolInlineUniformBlockCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolInlineUniformBlockCreateInfo(VkDescriptorPoolInlineUniformBlockCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorPoolInlineUniformBlockCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorPoolInlineUniformBlockCreateInfo &operator=(DescriptorPoolInlineUniformBlockCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolInlineUniformBlockCreateInfo &operator=(VkDescriptorPoolInlineUniformBlockCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setMaxInlineUniformBlockBindings(uint32_t maxInlineUniformBlockBindings_) VULKAN_HPP_NOEXCEPT { maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxInlineUniformBlockBindings); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorPoolInlineUniformBlockCreateInfo const &) const = default; #else bool operator==(DescriptorPoolInlineUniformBlockCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings); # endif } bool operator!=(DescriptorPoolInlineUniformBlockCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; const void *pNext = {}; uint32_t maxInlineUniformBlockBindings = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo) == sizeof(VkDescriptorPoolInlineUniformBlockCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorPoolInlineUniformBlockCreateInfo; }; using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; struct DescriptorSetAllocateInfo { using 
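  // Editorial usage sketch, not part of the generated header: DescriptorPoolInlineUniformBlockCreateInfo
  // extends DescriptorPoolCreateInfo through its pNext chain (core in Vulkan 1.3, otherwise
  // VK_EXT_inline_uniform_block). A vk::StructureChain keeps the two structures linked; `device` is an
  // assumed vk::Device and `inlinePoolSize` an assumed, named vk::DescriptorPoolSize of type
  // eInlineUniformBlock whose descriptorCount is a byte count.
  //
  //   vk::StructureChain<vk::DescriptorPoolCreateInfo, vk::DescriptorPoolInlineUniformBlockCreateInfo> poolChain(
  //     vk::DescriptorPoolCreateInfo({}, 4, inlinePoolSize),
  //     vk::DescriptorPoolInlineUniformBlockCreateInfo(4));  // at most 4 inline uniform block bindings in the pool
  //   vk::DescriptorPool inlinePool = device.createDescriptorPool(poolChain.get<vk::DescriptorPoolCreateInfo>());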
NativeType = VkDescriptorSetAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayouts_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), descriptorPool(descriptorPool_), descriptorSetCount(descriptorSetCount_), pSetLayouts(pSetLayouts_) { } VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(DescriptorSetAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetAllocateInfo(VkDescriptorSetAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetAllocateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &setLayouts_, const void *pNext_ = nullptr) : pNext(pNext_) , descriptorPool(descriptorPool_) , descriptorSetCount(static_cast(setLayouts_.size())) , pSetLayouts(setLayouts_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetAllocateInfo &operator=(DescriptorSetAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetAllocateInfo &operator=(VkDescriptorSetAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &setDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_) VULKAN_HPP_NOEXCEPT { descriptorPool = descriptorPool_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &setDescriptorSetCount(uint32_t descriptorSetCount_) VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &setPSetLayouts(const VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayouts_) VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetAllocateInfo & setSetLayouts(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &setLayouts_) VULKAN_HPP_NOEXCEPT { descriptorSetCount = static_cast(setLayouts_.size()); pSetLayouts = setLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetAllocateInfo const &) const = default; #else bool operator==(DescriptorSetAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (descriptorPool == rhs.descriptorPool) && 
(descriptorSetCount == rhs.descriptorSetCount) && (pSetLayouts == rhs.pSetLayouts); # endif } bool operator!=(DescriptorSetAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; uint32_t descriptorSetCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayouts = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo) == sizeof(VkDescriptorSetAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetAllocateInfo; }; struct DescriptorSetBindingReferenceVALVE { using NativeType = VkDescriptorSetBindingReferenceVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, uint32_t binding_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), descriptorSetLayout(descriptorSetLayout_), binding(binding_) { } VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE(DescriptorSetBindingReferenceVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetBindingReferenceVALVE(VkDescriptorSetBindingReferenceVALVE const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetBindingReferenceVALVE(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetBindingReferenceVALVE &operator=(DescriptorSetBindingReferenceVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetBindingReferenceVALVE &operator=(VkDescriptorSetBindingReferenceVALVE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_) VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, descriptorSetLayout, binding); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetBindingReferenceVALVE const &) const = default; #else bool operator==(DescriptorSetBindingReferenceVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else 
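  // Editorial usage sketch, not part of the generated header: descriptor sets are allocated from a
  // pool with the DescriptorSetAllocateInfo defined above. `device`, `descriptorPool` and `setLayout`
  // are assumed handles; enhanced mode is assumed for the ArrayProxy constructor and the
  // std::vector-returning allocateDescriptorSets overload.
  //
  //   vk::DescriptorSetAllocateInfo allocateInfo(descriptorPool, setLayout);
  //   std::vector<vk::DescriptorSet> descriptorSets = device.allocateDescriptorSets(allocateInfo);
  //   vk::DescriptorSet descriptorSet = descriptorSets.front();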
return (sType == rhs.sType) && (pNext == rhs.pNext) && (descriptorSetLayout == rhs.descriptorSetLayout) && (binding == rhs.binding); # endif } bool operator!=(DescriptorSetBindingReferenceVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; uint32_t binding = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE) == sizeof(VkDescriptorSetBindingReferenceVALVE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetBindingReferenceVALVE; }; struct DescriptorSetLayoutBinding { using NativeType = VkDescriptorSetLayoutBinding; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler *pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT : binding(binding_), descriptorType(descriptorType_), descriptorCount(descriptorCount_), stageFlags(stageFlags_), pImmutableSamplers(pImmutableSamplers_) { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(DescriptorSetLayoutBinding const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBinding(VkDescriptorSetLayoutBinding const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutBinding(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetLayoutBinding(uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &immutableSamplers_) : binding(binding_) , descriptorType(descriptorType_) , descriptorCount(static_cast(immutableSamplers_.size())) , stageFlags(stageFlags_) , pImmutableSamplers(immutableSamplers_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetLayoutBinding &operator=(DescriptorSetLayoutBinding const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBinding &operator=(VkDescriptorSetLayoutBinding const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &setDescriptorType(VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &setDescriptorCount(uint32_t descriptorCount_) VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &setStageFlags(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_) VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &setPImmutableSamplers(const VULKAN_HPP_NAMESPACE::Sampler 
*pImmutableSamplers_) VULKAN_HPP_NOEXCEPT { pImmutableSamplers = pImmutableSamplers_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetLayoutBinding & setImmutableSamplers(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &immutableSamplers_) VULKAN_HPP_NOEXCEPT { descriptorCount = static_cast(immutableSamplers_.size()); pImmutableSamplers = immutableSamplers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetLayoutBinding const &) const = default; #else bool operator==(DescriptorSetLayoutBinding const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (binding == rhs.binding) && (descriptorType == rhs.descriptorType) && (descriptorCount == rhs.descriptorCount) && (stageFlags == rhs.stageFlags) && (pImmutableSamplers == rhs.pImmutableSamplers); # endif } bool operator!=(DescriptorSetLayoutBinding const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t binding = {}; VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; uint32_t descriptorCount = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; const VULKAN_HPP_NAMESPACE::Sampler *pImmutableSamplers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding) == sizeof(VkDescriptorSetLayoutBinding), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetLayoutBinding is not nothrow_move_constructible!"); struct DescriptorSetLayoutBindingFlagsCreateInfo { using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags *pBindingFlags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), bindingCount(bindingCount_), pBindingFlags(pBindingFlags_) { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(DescriptorSetLayoutBindingFlagsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBindingFlagsCreateInfo(VkDescriptorSetLayoutBindingFlagsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutBindingFlagsCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bindingFlags_, const void *pNext_ = nullptr) : pNext(pNext_) , bindingCount(static_cast(bindingFlags_.size())) , pBindingFlags(bindingFlags_.data()) { } # endif 
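  // Editorial usage sketch, not part of the generated header: DescriptorSetLayoutBinding describes one
  // binding of a set layout; descriptorCount is the array size of that binding, and the enhanced-mode
  // constructor above derives it from the immutable-sampler list instead. The binding numbers, types
  // and stage flags below are illustrative only.
  //
  //   std::array<vk::DescriptorSetLayoutBinding, 2> bindings = {
  //     vk::DescriptorSetLayoutBinding(0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex),
  //     vk::DescriptorSetLayoutBinding(1, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment)
  //   };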
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetLayoutBindingFlagsCreateInfo &operator=(DescriptorSetLayoutBindingFlagsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBindingFlagsCreateInfo &operator=(VkDescriptorSetLayoutBindingFlagsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &setBindingCount(uint32_t bindingCount_) VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags(const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags *pBindingFlags_) VULKAN_HPP_NOEXCEPT { pBindingFlags = pBindingFlags_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetLayoutBindingFlagsCreateInfo &setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bindingFlags_) VULKAN_HPP_NOEXCEPT { bindingCount = static_cast(bindingFlags_.size()); pBindingFlags = bindingFlags_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, bindingCount, pBindingFlags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetLayoutBindingFlagsCreateInfo const &) const = default; #else bool operator==(DescriptorSetLayoutBindingFlagsCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (bindingCount == rhs.bindingCount) && (pBindingFlags == rhs.pBindingFlags); # endif } bool operator!=(DescriptorSetLayoutBindingFlagsCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; const void *pNext = {}; uint32_t bindingCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags *pBindingFlags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo) == sizeof(VkDescriptorSetLayoutBindingFlagsCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetLayoutBindingFlagsCreateInfo; }; using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; struct DescriptorSetLayoutCreateInfo { using NativeType = VkDescriptorSetLayoutCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; 
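  // Editorial usage sketch, not part of the generated header: DescriptorSetLayoutBindingFlagsCreateInfo
  // extends DescriptorSetLayoutCreateInfo through pNext, supplying one DescriptorBindingFlags value per
  // binding (bindingCount must match the layout's binding count). `bindings` refers to the two-element
  // array sketched above; update-after-bind flags additionally require the eUpdateAfterBindPool layout
  // create flag.
  //
  //   std::array<vk::DescriptorBindingFlags, 2> bindingFlags = {
  //     vk::DescriptorBindingFlags{}, vk::DescriptorBindingFlags(vk::DescriptorBindingFlagBits::ePartiallyBound)
  //   };
  //   vk::StructureChain<vk::DescriptorSetLayoutCreateInfo, vk::DescriptorSetLayoutBindingFlagsCreateInfo> layoutChain(
  //     vk::DescriptorSetLayoutCreateInfo({}, bindings),
  //     vk::DescriptorSetLayoutBindingFlagsCreateInfo(bindingFlags));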
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding *pBindings_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), bindingCount(bindingCount_), pBindings(pBindings_) { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(DescriptorSetLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutCreateInfo(VkDescriptorSetLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bindings_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , bindingCount(static_cast(bindings_.size())) , pBindings(bindings_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetLayoutCreateInfo &operator=(DescriptorSetLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutCreateInfo &operator=(VkDescriptorSetLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &setBindingCount(uint32_t bindingCount_) VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPBindings(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding *pBindings_) VULKAN_HPP_NOEXCEPT { pBindings = pBindings_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetLayoutCreateInfo & setBindings(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bindings_) VULKAN_HPP_NOEXCEPT { bindingCount = static_cast(bindings_.size()); pBindings = bindings_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, bindingCount, pBindings); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetLayoutCreateInfo const &) const = default; #else bool operator==(DescriptorSetLayoutCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (bindingCount == rhs.bindingCount) && (pBindings == rhs.pBindings); # endif } bool operator!=(DescriptorSetLayoutCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eDescriptorSetLayoutCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; uint32_t bindingCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding *pBindings = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo) == sizeof(VkDescriptorSetLayoutCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetLayoutCreateInfo; }; struct DescriptorSetLayoutHostMappingInfoVALVE { using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE(size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), descriptorOffset(descriptorOffset_), descriptorSize(descriptorSize_) { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE(DescriptorSetLayoutHostMappingInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutHostMappingInfoVALVE(VkDescriptorSetLayoutHostMappingInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutHostMappingInfoVALVE(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetLayoutHostMappingInfoVALVE &operator=(DescriptorSetLayoutHostMappingInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutHostMappingInfoVALVE &operator=(VkDescriptorSetLayoutHostMappingInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE &setDescriptorOffset(size_t descriptorOffset_) VULKAN_HPP_NOEXCEPT { descriptorOffset = descriptorOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE &setDescriptorSize(uint32_t descriptorSize_) VULKAN_HPP_NOEXCEPT { descriptorSize = descriptorSize_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, descriptorOffset, descriptorSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetLayoutHostMappingInfoVALVE const &) const = default; #else bool operator==(DescriptorSetLayoutHostMappingInfoVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (descriptorOffset == rhs.descriptorOffset) && (descriptorSize == rhs.descriptorSize); # endif } bool 
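  // Editorial usage sketch, not part of the generated header: the layout itself is created from
  // DescriptorSetLayoutCreateInfo with Device::createDescriptorSetLayout. `device`, `bindings` and
  // `layoutChain` refer to the sketches above; without binding flags the plain create info is enough.
  //
  //   vk::DescriptorSetLayout setLayout =
  //     device.createDescriptorSetLayout(layoutChain.get<vk::DescriptorSetLayoutCreateInfo>());
  //   // or, without a pNext chain:
  //   vk::DescriptorSetLayout plainLayout = device.createDescriptorSetLayout(vk::DescriptorSetLayoutCreateInfo({}, bindings));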
operator!=(DescriptorSetLayoutHostMappingInfoVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; void *pNext = {}; size_t descriptorOffset = {}; uint32_t descriptorSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE) == sizeof(VkDescriptorSetLayoutHostMappingInfoVALVE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetLayoutHostMappingInfoVALVE; }; struct DescriptorSetLayoutSupport { using NativeType = VkDescriptorSetLayoutSupport; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), supported(supported_) { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(DescriptorSetLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutSupport(VkDescriptorSetLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutSupport(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetLayoutSupport &operator=(DescriptorSetLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutSupport &operator=(VkDescriptorSetLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, supported); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetLayoutSupport const &) const = default; #else bool operator==(DescriptorSetLayoutSupport const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (supported == rhs.supported); # endif } bool operator!=(DescriptorSetLayoutSupport const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supported = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport) == sizeof(VkDescriptorSetLayoutSupport), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetLayoutSupport is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetLayoutSupport; }; using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; struct DescriptorSetVariableDescriptorCountAllocateInfo 
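  // Editorial usage sketch, not part of the generated header: DescriptorSetLayoutSupport is an output
  // structure filled by Device::getDescriptorSetLayoutSupport, which reports whether a proposed layout
  // could be created without actually creating it. `device` and `layoutCreateInfo` (a
  // vk::DescriptorSetLayoutCreateInfo) are assumed.
  //
  //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport(layoutCreateInfo);
  //   bool creatable = (support.supported == VK_TRUE);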
{ using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t *pDescriptorCounts_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), descriptorSetCount(descriptorSetCount_), pDescriptorCounts(pDescriptorCounts_) { } VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(DescriptorSetVariableDescriptorCountAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountAllocateInfo(VkDescriptorSetVariableDescriptorCountAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetVariableDescriptorCountAllocateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetVariableDescriptorCountAllocateInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &descriptorCounts_, const void *pNext_ = nullptr) : pNext(pNext_) , descriptorSetCount(static_cast(descriptorCounts_.size())) , pDescriptorCounts(descriptorCounts_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetVariableDescriptorCountAllocateInfo &operator=(DescriptorSetVariableDescriptorCountAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountAllocateInfo &operator=(VkDescriptorSetVariableDescriptorCountAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &setDescriptorSetCount(uint32_t descriptorSetCount_) VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &setPDescriptorCounts(const uint32_t *pDescriptorCounts_) VULKAN_HPP_NOEXCEPT { pDescriptorCounts = pDescriptorCounts_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &descriptorCounts_) VULKAN_HPP_NOEXCEPT { descriptorSetCount = static_cast(descriptorCounts_.size()); pDescriptorCounts = descriptorCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, descriptorSetCount, pDescriptorCounts); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetVariableDescriptorCountAllocateInfo const &) const = default; #else bool operator==(DescriptorSetVariableDescriptorCountAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return 
this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (descriptorSetCount == rhs.descriptorSetCount) && (pDescriptorCounts == rhs.pDescriptorCounts); # endif } bool operator!=(DescriptorSetVariableDescriptorCountAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; const void *pNext = {}; uint32_t descriptorSetCount = {}; const uint32_t *pDescriptorCounts = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo) == sizeof(VkDescriptorSetVariableDescriptorCountAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetVariableDescriptorCountAllocateInfo; }; using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; struct DescriptorSetVariableDescriptorCountLayoutSupport { using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxVariableDescriptorCount(maxVariableDescriptorCount_) { } VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(DescriptorSetVariableDescriptorCountLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountLayoutSupport(VkDescriptorSetVariableDescriptorCountLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorSetVariableDescriptorCountLayoutSupport(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorSetVariableDescriptorCountLayoutSupport & operator=(DescriptorSetVariableDescriptorCountLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountLayoutSupport &operator=(VkDescriptorSetVariableDescriptorCountLayoutSupport const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxVariableDescriptorCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorSetVariableDescriptorCountLayoutSupport const &) const = default; #else bool operator==(DescriptorSetVariableDescriptorCountLayoutSupport const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxVariableDescriptorCount == rhs.maxVariableDescriptorCount); # endif } bool 
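  // Editorial usage sketch, not part of the generated header: DescriptorSetVariableDescriptorCountAllocateInfo
  // extends DescriptorSetAllocateInfo through pNext and supplies one count per set being allocated, for
  // layouts whose last binding uses eVariableDescriptorCount. `device`, `descriptorPool` and
  // `bindlessLayout` are assumed handles; the count of 1024 is illustrative.
  //
  //   std::array<uint32_t, 1> variableCounts = {1024};
  //   vk::StructureChain<vk::DescriptorSetAllocateInfo, vk::DescriptorSetVariableDescriptorCountAllocateInfo> allocChain(
  //     vk::DescriptorSetAllocateInfo(descriptorPool, bindlessLayout),
  //     vk::DescriptorSetVariableDescriptorCountAllocateInfo(variableCounts));
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets(allocChain.get<vk::DescriptorSetAllocateInfo>());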
operator!=(DescriptorSetVariableDescriptorCountLayoutSupport const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; void *pNext = {}; uint32_t maxVariableDescriptorCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport) == sizeof(VkDescriptorSetVariableDescriptorCountLayoutSupport), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorSetVariableDescriptorCountLayoutSupport; }; using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; struct DescriptorUpdateTemplateEntry { using NativeType = VkDescriptorUpdateTemplateEntry; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT : dstBinding(dstBinding_), dstArrayElement(dstArrayElement_), descriptorCount(descriptorCount_), descriptorType(descriptorType_), offset(offset_), stride(stride_) { } VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(DescriptorUpdateTemplateEntry const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateEntry(VkDescriptorUpdateTemplateEntry const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorUpdateTemplateEntry(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorUpdateTemplateEntry &operator=(DescriptorUpdateTemplateEntry const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateEntry &operator=(VkDescriptorUpdateTemplateEntry const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &setDstBinding(uint32_t dstBinding_) VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &setDstArrayElement(uint32_t dstArrayElement_) VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &setDescriptorCount(uint32_t descriptorCount_) VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &setDescriptorType(VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &setOffset(size_t offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &setStride(size_t stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= 
VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorUpdateTemplateEntry const &) const = default; #else bool operator==(DescriptorUpdateTemplateEntry const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (dstBinding == rhs.dstBinding) && (dstArrayElement == rhs.dstArrayElement) && (descriptorCount == rhs.descriptorCount) && (descriptorType == rhs.descriptorType) && (offset == rhs.offset) && (stride == rhs.stride); # endif } bool operator!=(DescriptorUpdateTemplateEntry const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t dstBinding = {}; uint32_t dstArrayElement = {}; uint32_t descriptorCount = {}; VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; size_t offset = {}; size_t stride = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry) == sizeof(VkDescriptorUpdateTemplateEntry), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!"); using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; struct DescriptorUpdateTemplateCreateInfo { using NativeType = VkDescriptorUpdateTemplateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry *pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), descriptorUpdateEntryCount(descriptorUpdateEntryCount_), pDescriptorUpdateEntries(pDescriptorUpdateEntries_), templateType(templateType_), descriptorSetLayout(descriptorSetLayout_), pipelineBindPoint(pipelineBindPoint_), pipelineLayout(pipelineLayout_), set(set_) { } VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(DescriptorUpdateTemplateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateCreateInfo(VkDescriptorUpdateTemplateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DescriptorUpdateTemplateCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, 
VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , descriptorUpdateEntryCount(static_cast(descriptorUpdateEntries_.size())) , pDescriptorUpdateEntries(descriptorUpdateEntries_.data()) , templateType(templateType_) , descriptorSetLayout(descriptorSetLayout_) , pipelineBindPoint(pipelineBindPoint_) , pipelineLayout(pipelineLayout_) , set(set_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DescriptorUpdateTemplateCreateInfo &operator=(DescriptorUpdateTemplateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateCreateInfo &operator=(VkDescriptorUpdateTemplateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &setDescriptorUpdateEntryCount(uint32_t descriptorUpdateEntryCount_) VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = descriptorUpdateEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry *pDescriptorUpdateEntries_) VULKAN_HPP_NOEXCEPT { pDescriptorUpdateEntries = pDescriptorUpdateEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DescriptorUpdateTemplateCreateInfo &setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &descriptorUpdateEntries_) VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = static_cast(descriptorUpdateEntries_.size()); pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setTemplateType(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_) VULKAN_HPP_NOEXCEPT { templateType = templateType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_) VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &setPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_) VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &setSet(uint32_t set_) VULKAN_HPP_NOEXCEPT { set = set_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); 
} #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DescriptorUpdateTemplateCreateInfo const &) const = default; #else bool operator==(DescriptorUpdateTemplateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount) && (pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries) && (templateType == rhs.templateType) && (descriptorSetLayout == rhs.descriptorSetLayout) && (pipelineBindPoint == rhs.pipelineBindPoint) && (pipelineLayout == rhs.pipelineLayout) && (set == rhs.set); # endif } bool operator!=(DescriptorUpdateTemplateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; uint32_t descriptorUpdateEntryCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry *pDescriptorUpdateEntries = {}; VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; uint32_t set = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo) == sizeof(VkDescriptorUpdateTemplateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DescriptorUpdateTemplateCreateInfo; }; using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; struct DeviceBufferMemoryRequirements { using NativeType = VkDeviceBufferMemoryRequirements; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements(const VULKAN_HPP_NAMESPACE::BufferCreateInfo *pCreateInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pCreateInfo(pCreateInfo_) { } VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements(DeviceBufferMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceBufferMemoryRequirements(VkDeviceBufferMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT : DeviceBufferMemoryRequirements(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceBufferMemoryRequirements &operator=(DeviceBufferMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceBufferMemoryRequirements &operator=(VkDeviceBufferMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements &setPCreateInfo(const VULKAN_HPP_NAMESPACE::BufferCreateInfo *pCreateInfo_) VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pCreateInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceBufferMemoryRequirements const &) const = default; #else bool operator==(DeviceBufferMemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pCreateInfo == rhs.pCreateInfo); # endif } bool operator!=(DeviceBufferMemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements; const void *pNext = {}; const VULKAN_HPP_NAMESPACE::BufferCreateInfo *pCreateInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements) == sizeof(VkDeviceBufferMemoryRequirements), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceBufferMemoryRequirements is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceBufferMemoryRequirements; }; using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; struct DeviceQueueCreateInfo { using NativeType = VkDeviceQueueCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float *pQueuePriorities_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), queueFamilyIndex(queueFamilyIndex_), queueCount(queueCount_), pQueuePriorities(pQueuePriorities_) { } VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(DeviceQueueCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceQueueCreateInfo(VkDeviceQueueCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceQueueCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queuePriorities_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , queueFamilyIndex(queueFamilyIndex_) , queueCount(static_cast(queuePriorities_.size())) , pQueuePriorities(queuePriorities_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceQueueCreateInfo &operator=(DeviceQueueCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceQueueCreateInfo &operator=(VkDeviceQueueCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &setQueueFamilyIndex(uint32_t queueFamilyIndex_) VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &setQueueCount(uint32_t queueCount_) VULKAN_HPP_NOEXCEPT { queueCount = queueCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &setPQueuePriorities(const float *pQueuePriorities_) VULKAN_HPP_NOEXCEPT { pQueuePriorities = pQueuePriorities_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceQueueCreateInfo &setQueuePriorities(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queuePriorities_) VULKAN_HPP_NOEXCEPT { queueCount = static_cast(queuePriorities_.size()); pQueuePriorities = queuePriorities_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceQueueCreateInfo const &) const = default; #else bool operator==(DeviceQueueCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (queueFamilyIndex == rhs.queueFamilyIndex) && (queueCount == rhs.queueCount) && (pQueuePriorities == rhs.pQueuePriorities); # endif } bool operator!=(DeviceQueueCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; uint32_t queueCount = {}; const float *pQueuePriorities = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo) == sizeof(VkDeviceQueueCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceQueueCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceQueueCreateInfo; }; struct PhysicalDeviceFeatures { using NativeType = VkPhysicalDeviceFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT : robustBufferAccess(robustBufferAccess_), fullDrawIndexUint32(fullDrawIndexUint32_), imageCubeArray(imageCubeArray_), independentBlend(independentBlend_), geometryShader(geometryShader_), tessellationShader(tessellationShader_), sampleRateShading(sampleRateShading_), dualSrcBlend(dualSrcBlend_), logicOp(logicOp_), multiDrawIndirect(multiDrawIndirect_), drawIndirectFirstInstance(drawIndirectFirstInstance_), depthClamp(depthClamp_), 
depthBiasClamp(depthBiasClamp_), fillModeNonSolid(fillModeNonSolid_), depthBounds(depthBounds_), wideLines(wideLines_), largePoints(largePoints_), alphaToOne(alphaToOne_), multiViewport(multiViewport_), samplerAnisotropy(samplerAnisotropy_), textureCompressionETC2(textureCompressionETC2_), textureCompressionASTC_LDR(textureCompressionASTC_LDR_), textureCompressionBC(textureCompressionBC_), occlusionQueryPrecise(occlusionQueryPrecise_), pipelineStatisticsQuery(pipelineStatisticsQuery_), vertexPipelineStoresAndAtomics(vertexPipelineStoresAndAtomics_), fragmentStoresAndAtomics(fragmentStoresAndAtomics_), shaderTessellationAndGeometryPointSize(shaderTessellationAndGeometryPointSize_), shaderImageGatherExtended(shaderImageGatherExtended_), shaderStorageImageExtendedFormats(shaderStorageImageExtendedFormats_), shaderStorageImageMultisample(shaderStorageImageMultisample_), shaderStorageImageReadWithoutFormat(shaderStorageImageReadWithoutFormat_), shaderStorageImageWriteWithoutFormat(shaderStorageImageWriteWithoutFormat_), shaderUniformBufferArrayDynamicIndexing(shaderUniformBufferArrayDynamicIndexing_), shaderSampledImageArrayDynamicIndexing(shaderSampledImageArrayDynamicIndexing_), shaderStorageBufferArrayDynamicIndexing(shaderStorageBufferArrayDynamicIndexing_), shaderStorageImageArrayDynamicIndexing(shaderStorageImageArrayDynamicIndexing_), shaderClipDistance(shaderClipDistance_), shaderCullDistance(shaderCullDistance_), shaderFloat64(shaderFloat64_), shaderInt64(shaderInt64_), shaderInt16(shaderInt16_), shaderResourceResidency(shaderResourceResidency_), shaderResourceMinLod(shaderResourceMinLod_), sparseBinding(sparseBinding_), sparseResidencyBuffer(sparseResidencyBuffer_), sparseResidencyImage2D(sparseResidencyImage2D_), sparseResidencyImage3D(sparseResidencyImage3D_), sparseResidency2Samples(sparseResidency2Samples_), sparseResidency4Samples(sparseResidency4Samples_), sparseResidency8Samples(sparseResidency8Samples_), sparseResidency16Samples(sparseResidency16Samples_), sparseResidencyAliased(sparseResidencyAliased_), variableMultisampleRate(variableMultisampleRate_), inheritedQueries(inheritedQueries_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(PhysicalDeviceFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFeatures(VkPhysicalDeviceFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFeatures &operator=(PhysicalDeviceFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFeatures &operator=(VkPhysicalDeviceFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setRobustBufferAccess(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_) VULKAN_HPP_NOEXCEPT { robustBufferAccess = robustBufferAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setFullDrawIndexUint32(VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_) VULKAN_HPP_NOEXCEPT { fullDrawIndexUint32 = fullDrawIndexUint32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setImageCubeArray(VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_) VULKAN_HPP_NOEXCEPT { imageCubeArray = imageCubeArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setIndependentBlend(VULKAN_HPP_NAMESPACE::Bool32 independentBlend_) VULKAN_HPP_NOEXCEPT { independentBlend = independentBlend_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures 
&setGeometryShader(VULKAN_HPP_NAMESPACE::Bool32 geometryShader_) VULKAN_HPP_NOEXCEPT { geometryShader = geometryShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setTessellationShader(VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_) VULKAN_HPP_NOEXCEPT { tessellationShader = tessellationShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSampleRateShading(VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_) VULKAN_HPP_NOEXCEPT { sampleRateShading = sampleRateShading_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setDualSrcBlend(VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_) VULKAN_HPP_NOEXCEPT { dualSrcBlend = dualSrcBlend_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setLogicOp(VULKAN_HPP_NAMESPACE::Bool32 logicOp_) VULKAN_HPP_NOEXCEPT { logicOp = logicOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setMultiDrawIndirect(VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_) VULKAN_HPP_NOEXCEPT { multiDrawIndirect = multiDrawIndirect_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setDrawIndirectFirstInstance(VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_) VULKAN_HPP_NOEXCEPT { drawIndirectFirstInstance = drawIndirectFirstInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setDepthClamp(VULKAN_HPP_NAMESPACE::Bool32 depthClamp_) VULKAN_HPP_NOEXCEPT { depthClamp = depthClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setDepthBiasClamp(VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_) VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setFillModeNonSolid(VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_) VULKAN_HPP_NOEXCEPT { fillModeNonSolid = fillModeNonSolid_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setDepthBounds(VULKAN_HPP_NAMESPACE::Bool32 depthBounds_) VULKAN_HPP_NOEXCEPT { depthBounds = depthBounds_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setWideLines(VULKAN_HPP_NAMESPACE::Bool32 wideLines_) VULKAN_HPP_NOEXCEPT { wideLines = wideLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setLargePoints(VULKAN_HPP_NAMESPACE::Bool32 largePoints_) VULKAN_HPP_NOEXCEPT { largePoints = largePoints_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setAlphaToOne(VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_) VULKAN_HPP_NOEXCEPT { alphaToOne = alphaToOne_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setMultiViewport(VULKAN_HPP_NAMESPACE::Bool32 multiViewport_) VULKAN_HPP_NOEXCEPT { multiViewport = multiViewport_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSamplerAnisotropy(VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_) VULKAN_HPP_NOEXCEPT { samplerAnisotropy = samplerAnisotropy_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setTextureCompressionETC2(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_) VULKAN_HPP_NOEXCEPT { textureCompressionETC2 = textureCompressionETC2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionASTC_LDR(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_) VULKAN_HPP_NOEXCEPT { textureCompressionASTC_LDR = textureCompressionASTC_LDR_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setTextureCompressionBC(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_) VULKAN_HPP_NOEXCEPT { textureCompressionBC = textureCompressionBC_; return *this; } 
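    // Usage sketch (illustrative only, not generated code): PhysicalDeviceFeatures is a plain
    // struct of Bool32 flags, so the usual pattern is to query the device's supported feature
    // set and test individual members before requesting them. PhysicalDevice::getFeatures()
    // lives in the function wrappers, not in this header, and `physicalDevice` is assumed to
    // be a valid vk::PhysicalDevice:
    //
    //   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
    //   if ( supported.samplerAnisotropy && supported.geometryShader )
    //   {
    //     // both features can safely be requested at device creation
    //   }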
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setOcclusionQueryPrecise(VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_) VULKAN_HPP_NOEXCEPT { occlusionQueryPrecise = occlusionQueryPrecise_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setPipelineStatisticsQuery(VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_) VULKAN_HPP_NOEXCEPT { pipelineStatisticsQuery = pipelineStatisticsQuery_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics(VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_) VULKAN_HPP_NOEXCEPT { vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setFragmentStoresAndAtomics(VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_) VULKAN_HPP_NOEXCEPT { fragmentStoresAndAtomics = fragmentStoresAndAtomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize(VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_) VULKAN_HPP_NOEXCEPT { shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderImageGatherExtended(VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_) VULKAN_HPP_NOEXCEPT { shaderImageGatherExtended = shaderImageGatherExtended_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_) VULKAN_HPP_NOEXCEPT { shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_) VULKAN_HPP_NOEXCEPT { shaderStorageImageMultisample = shaderStorageImageMultisample_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_) VULKAN_HPP_NOEXCEPT { shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_) VULKAN_HPP_NOEXCEPT { shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayDynamicIndexing = 
shaderStorageImageArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderClipDistance(VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_) VULKAN_HPP_NOEXCEPT { shaderClipDistance = shaderClipDistance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderCullDistance(VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_) VULKAN_HPP_NOEXCEPT { shaderCullDistance = shaderCullDistance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderFloat64(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_) VULKAN_HPP_NOEXCEPT { shaderFloat64 = shaderFloat64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderInt64(VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_) VULKAN_HPP_NOEXCEPT { shaderInt64 = shaderInt64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderInt16(VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_) VULKAN_HPP_NOEXCEPT { shaderInt16 = shaderInt16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderResourceResidency(VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_) VULKAN_HPP_NOEXCEPT { shaderResourceResidency = shaderResourceResidency_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setShaderResourceMinLod(VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_) VULKAN_HPP_NOEXCEPT { shaderResourceMinLod = shaderResourceMinLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseBinding(VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_) VULKAN_HPP_NOEXCEPT { sparseBinding = sparseBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidencyBuffer(VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_) VULKAN_HPP_NOEXCEPT { sparseResidencyBuffer = sparseResidencyBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidencyImage2D(VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_) VULKAN_HPP_NOEXCEPT { sparseResidencyImage2D = sparseResidencyImage2D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidencyImage3D(VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_) VULKAN_HPP_NOEXCEPT { sparseResidencyImage3D = sparseResidencyImage3D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidency2Samples(VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_) VULKAN_HPP_NOEXCEPT { sparseResidency2Samples = sparseResidency2Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidency4Samples(VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_) VULKAN_HPP_NOEXCEPT { sparseResidency4Samples = sparseResidency4Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidency8Samples(VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_) VULKAN_HPP_NOEXCEPT { sparseResidency8Samples = sparseResidency8Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidency16Samples(VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_) VULKAN_HPP_NOEXCEPT { sparseResidency16Samples = sparseResidency16Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setSparseResidencyAliased(VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_) VULKAN_HPP_NOEXCEPT { sparseResidencyAliased = sparseResidencyAliased_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setVariableMultisampleRate(VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_) VULKAN_HPP_NOEXCEPT { variableMultisampleRate = variableMultisampleRate_; return *this; } 
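    // Usage sketch (illustrative only, not generated code): each setter above returns *this,
    // so a feature request can be built by chaining; the two features chosen here are just an
    // example:
    //
    //   vk::PhysicalDeviceFeatures enabledFeatures = vk::PhysicalDeviceFeatures()
    //                                                  .setSamplerAnisotropy( VK_TRUE )
    //                                                  .setFillModeNonSolid( VK_TRUE );
    //   // pass &enabledFeatures to DeviceCreateInfo::setPEnabledFeatures()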
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &setInheritedQueries(VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_) VULKAN_HPP_NOEXCEPT { inheritedQueries = inheritedQueries_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(robustBufferAccess, fullDrawIndexUint32, imageCubeArray, independentBlend, geometryShader, tessellationShader, sampleRateShading, dualSrcBlend, logicOp, multiDrawIndirect, drawIndirectFirstInstance, depthClamp, depthBiasClamp, fillModeNonSolid, depthBounds, wideLines, largePoints, alphaToOne, multiViewport, samplerAnisotropy, textureCompressionETC2, textureCompressionASTC_LDR, textureCompressionBC, occlusionQueryPrecise, pipelineStatisticsQuery, vertexPipelineStoresAndAtomics, fragmentStoresAndAtomics, shaderTessellationAndGeometryPointSize, shaderImageGatherExtended, shaderStorageImageExtendedFormats, shaderStorageImageMultisample, shaderStorageImageReadWithoutFormat, shaderStorageImageWriteWithoutFormat, shaderUniformBufferArrayDynamicIndexing, shaderSampledImageArrayDynamicIndexing, shaderStorageBufferArrayDynamicIndexing, shaderStorageImageArrayDynamicIndexing, shaderClipDistance, shaderCullDistance, shaderFloat64, shaderInt64, shaderInt16, shaderResourceResidency, shaderResourceMinLod, sparseBinding, sparseResidencyBuffer, sparseResidencyImage2D, sparseResidencyImage3D, sparseResidency2Samples, sparseResidency4Samples, sparseResidency8Samples, sparseResidency16Samples, sparseResidencyAliased, variableMultisampleRate, inheritedQueries); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFeatures const &) const = default; #else bool operator==(PhysicalDeviceFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (robustBufferAccess == rhs.robustBufferAccess) && (fullDrawIndexUint32 == rhs.fullDrawIndexUint32) && (imageCubeArray == rhs.imageCubeArray) && (independentBlend == rhs.independentBlend) && (geometryShader == rhs.geometryShader) && (tessellationShader == rhs.tessellationShader) && (sampleRateShading == rhs.sampleRateShading) && (dualSrcBlend == rhs.dualSrcBlend) && (logicOp == rhs.logicOp) && (multiDrawIndirect == rhs.multiDrawIndirect) && (drawIndirectFirstInstance == rhs.drawIndirectFirstInstance) && (depthClamp == rhs.depthClamp) && (depthBiasClamp == rhs.depthBiasClamp) && (fillModeNonSolid == rhs.fillModeNonSolid) && (depthBounds == rhs.depthBounds) && (wideLines == rhs.wideLines) && (largePoints == rhs.largePoints) && (alphaToOne == rhs.alphaToOne) && (multiViewport == rhs.multiViewport) && (samplerAnisotropy == rhs.samplerAnisotropy) && (textureCompressionETC2 == rhs.textureCompressionETC2) && (textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR) && (textureCompressionBC == rhs.textureCompressionBC) && (occlusionQueryPrecise == rhs.occlusionQueryPrecise) && (pipelineStatisticsQuery == rhs.pipelineStatisticsQuery) && (vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics) && (fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics) && (shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize) && 
(shaderImageGatherExtended == rhs.shaderImageGatherExtended) && (shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats) && (shaderStorageImageMultisample == rhs.shaderStorageImageMultisample) && (shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat) && (shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat) && (shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing) && (shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing) && (shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing) && (shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing) && (shaderClipDistance == rhs.shaderClipDistance) && (shaderCullDistance == rhs.shaderCullDistance) && (shaderFloat64 == rhs.shaderFloat64) && (shaderInt64 == rhs.shaderInt64) && (shaderInt16 == rhs.shaderInt16) && (shaderResourceResidency == rhs.shaderResourceResidency) && (shaderResourceMinLod == rhs.shaderResourceMinLod) && (sparseBinding == rhs.sparseBinding) && (sparseResidencyBuffer == rhs.sparseResidencyBuffer) && (sparseResidencyImage2D == rhs.sparseResidencyImage2D) && (sparseResidencyImage3D == rhs.sparseResidencyImage3D) && (sparseResidency2Samples == rhs.sparseResidency2Samples) && (sparseResidency4Samples == rhs.sparseResidency4Samples) && (sparseResidency8Samples == rhs.sparseResidency8Samples) && (sparseResidency16Samples == rhs.sparseResidency16Samples) && (sparseResidencyAliased == rhs.sparseResidencyAliased) && (variableMultisampleRate == rhs.variableMultisampleRate) && (inheritedQueries == rhs.inheritedQueries); # endif } bool operator!=(PhysicalDeviceFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageImageReadWithoutFormat = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures) == sizeof(VkPhysicalDeviceFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFeatures is not nothrow_move_constructible!"); struct DeviceCreateInfo { using NativeType = VkDeviceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo *pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char *const *ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char *const *ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *pEnabledFeatures_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), queueCreateInfoCount(queueCreateInfoCount_), pQueueCreateInfos(pQueueCreateInfos_), enabledLayerCount(enabledLayerCount_), ppEnabledLayerNames(ppEnabledLayerNames_), enabledExtensionCount(enabledExtensionCount_), ppEnabledExtensionNames(ppEnabledExtensionNames_), pEnabledFeatures(pEnabledFeatures_) { } VULKAN_HPP_CONSTEXPR DeviceCreateInfo(DeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceCreateInfo(VkDeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *pEnabledFeatures_ = {}, const void *pNext_ = 
nullptr) : pNext(pNext_) , flags(flags_) , queueCreateInfoCount(static_cast(queueCreateInfos_.size())) , pQueueCreateInfos(queueCreateInfos_.data()) , enabledLayerCount(static_cast(pEnabledLayerNames_.size())) , ppEnabledLayerNames(pEnabledLayerNames_.data()) , enabledExtensionCount(static_cast(pEnabledExtensionNames_.size())) , ppEnabledExtensionNames(pEnabledExtensionNames_.data()) , pEnabledFeatures(pEnabledFeatures_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceCreateInfo &operator=(DeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceCreateInfo &operator=(VkDeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setQueueCreateInfoCount(uint32_t queueCreateInfoCount_) VULKAN_HPP_NOEXCEPT { queueCreateInfoCount = queueCreateInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPQueueCreateInfos(const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo *pQueueCreateInfos_) VULKAN_HPP_NOEXCEPT { pQueueCreateInfos = pQueueCreateInfos_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceCreateInfo &setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueCreateInfos_) VULKAN_HPP_NOEXCEPT { queueCreateInfoCount = static_cast(queueCreateInfos_.size()); pQueueCreateInfos = queueCreateInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setEnabledLayerCount(uint32_t enabledLayerCount_) VULKAN_HPP_NOEXCEPT { enabledLayerCount = enabledLayerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setPpEnabledLayerNames(const char *const *ppEnabledLayerNames_) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceCreateInfo & setPEnabledLayerNames(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledLayerNames_) VULKAN_HPP_NOEXCEPT { enabledLayerCount = static_cast(pEnabledLayerNames_.size()); ppEnabledLayerNames = pEnabledLayerNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setEnabledExtensionCount(uint32_t enabledExtensionCount_) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setPpEnabledExtensionNames(const char *const *ppEnabledExtensionNames_) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceCreateInfo & setPEnabledExtensionNames(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledExtensionNames_) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = static_cast(pEnabledExtensionNames_.size()); ppEnabledExtensionNames = pEnabledExtensionNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo &setPEnabledFeatures(const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *pEnabledFeatures_) VULKAN_HPP_NOEXCEPT { pEnabledFeatures = pEnabledFeatures_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit 
operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceCreateInfo *>( this );
    }

    explicit operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DeviceCreateFlags const &,
               uint32_t const &,
               const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * const &,
               uint32_t const &,
               const char * const * const &,
               uint32_t const &,
               const char * const * const &,
               const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       queueCreateInfoCount,
                       pQueueCreateInfos,
                       enabledLayerCount,
                       ppEnabledLayerNames,
                       enabledExtensionCount,
                       ppEnabledExtensionNames,
                       pEnabledFeatures );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 )
        return cmp;
      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledLayerCount; ++i )
      {
        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledExtensionCount; ++i )
      {
        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif

    bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) &&
             ( enabledLayerCount == rhs.enabledLayerCount ) &&
             [this, rhs]
             {
               bool equal = true;
               for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i )
               {
                 equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) ||
                           ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) );
               }
               return equal;
             }() && ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             [this, rhs]
             {
               bool equal = true;
               for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i )
               {
                 equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) ||
                           ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) );
               }
               return equal;
             }() && ( pEnabledFeatures == rhs.pEnabledFeatures );
    }

    bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
    uint32_t queueCreateInfoCount = {};
    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo *pQueueCreateInfos = {};
    uint32_t enabledLayerCount = {};
    const char *const *ppEnabledLayerNames = {};
    uint32_t enabledExtensionCount = {};
    const char *const *ppEnabledExtensionNames = {};
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *pEnabledFeatures = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ),
                            "struct and wrapper have different size!" );
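  // Usage sketch (illustrative only, not generated code): filling DeviceQueueCreateInfo and
  // DeviceCreateInfo with the enhanced-mode array setters and creating a device. The queue
  // family index, the extension name, and the call to PhysicalDevice::createDevice() (which is
  // defined in the function wrappers, not in this header) are assumptions for the example, and
  // `physicalDevice` is assumed to be a valid vk::PhysicalDevice:
  //
  //   float                     queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
  //                                           .setQueueFamilyIndex( 0 )
  //                                           .setQueuePriorities( queuePriority );  // sets queueCount and pQueuePriorities
  //
  //   std::vector<const char *>  extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
  //   vk::PhysicalDeviceFeatures enabledFeatures{};
  //   enabledFeatures.samplerAnisotropy = VK_TRUE;
  //
  //   vk::DeviceCreateInfo createInfo;
  //   createInfo.setQueueCreateInfos( queueInfo )            // sets count and pointer together
  //             .setPEnabledExtensionNames( extensions )
  //             .setPEnabledFeatures( &enabledFeatures );
  //
  //   vk::Device device = physicalDevice.createDevice( createInfo );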
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceCreateInfo; }; struct DeviceDeviceMemoryReportCreateInfoEXT { using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void *pUserData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pfnUserCallback(pfnUserCallback_), pUserData(pUserData_) { } VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(DeviceDeviceMemoryReportCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceDeviceMemoryReportCreateInfoEXT(VkDeviceDeviceMemoryReportCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DeviceDeviceMemoryReportCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceDeviceMemoryReportCreateInfoEXT &operator=(DeviceDeviceMemoryReportCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceDeviceMemoryReportCreateInfoEXT &operator=(VkDeviceDeviceMemoryReportCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback(PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_) VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &setPUserData(void *pUserData_) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pfnUserCallback, pUserData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceDeviceMemoryReportCreateInfoEXT const &) const = default; #else bool operator==(DeviceDeviceMemoryReportCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pfnUserCallback == rhs.pfnUserCallback) && (pUserData == rhs.pUserData); # endif } bool operator!=(DeviceDeviceMemoryReportCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; void *pUserData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT) == sizeof(VkDeviceDeviceMemoryReportCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceDeviceMemoryReportCreateInfoEXT; }; struct DeviceDiagnosticsConfigCreateInfoNV { using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(DeviceDiagnosticsConfigCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceDiagnosticsConfigCreateInfoNV(VkDeviceDiagnosticsConfigCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : DeviceDiagnosticsConfigCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceDiagnosticsConfigCreateInfoNV &operator=(DeviceDiagnosticsConfigCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceDiagnosticsConfigCreateInfoNV &operator=(VkDeviceDiagnosticsConfigCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV &setFlags(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceDiagnosticsConfigCreateInfoNV const &) const = default; #else bool operator==(DeviceDiagnosticsConfigCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(DeviceDiagnosticsConfigCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; }; 
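  // Usage sketch (illustrative only, not generated code): like other device-creation extension
  // structs, DeviceDiagnosticsConfigCreateInfoNV is consumed by chaining it into
  // DeviceCreateInfo::pNext. A minimal sketch using the plain setters; vk::StructureChain
  // (defined elsewhere in vulkan.hpp) can be used instead for compile-time chain checking, and
  // the chosen flag is only an example:
  //
  //   vk::DeviceDiagnosticsConfigCreateInfoNV diagnosticsInfo(
  //     vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints );
  //
  //   vk::DeviceCreateInfo createInfo;
  //   createInfo.setPNext( &diagnosticsInfo );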
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV) == sizeof(VkDeviceDiagnosticsConfigCreateInfoNV), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!");
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!");
template<> struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV> { using Type = DeviceDiagnosticsConfigCreateInfoNV; };

struct DeviceEventInfoEXT
{
  using NativeType = VkDeviceEventInfoEXT;
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
  VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
    : pNext(pNext_), deviceEvent(deviceEvent_)
  {
  }
  VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(DeviceEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
  DeviceEventInfoEXT(VkDeviceEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT(*reinterpret_cast<DeviceEventInfoEXT const *>(&rhs)) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

  DeviceEventInfoEXT &operator=(DeviceEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
  DeviceEventInfoEXT &operator=(VkDeviceEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>(&rhs);
    return *this;
  }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
  VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT &setDeviceEvent(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_) VULKAN_HPP_NOEXCEPT
  {
    deviceEvent = deviceEvent_;
    return *this;
  }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

  explicit operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkDeviceEventInfoEXT *>(this);
  }
  explicit operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkDeviceEventInfoEXT *>(this);
  }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
  auto
# else
  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
# endif
  reflect() const VULKAN_HPP_NOEXCEPT
  {
    return std::tie(sType, pNext, deviceEvent);
  }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>(DeviceEventInfoEXT const &) const = default;
#else
  bool operator==(DeviceEventInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
  {
# if defined(VULKAN_HPP_USE_REFLECT)
    return this->reflect() == rhs.reflect();
# else
    return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceEvent == rhs.deviceEvent);
# endif
  }
  bool operator!=(DeviceEventInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==(rhs);
  }
#endif

public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
  const void *pNext = {};
  VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
};

VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT) == sizeof(VkDeviceEventInfoEXT), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!");
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "DeviceEventInfoEXT is not nothrow_move_constructible!");
template<> struct CppType<StructureType, StructureType::eDeviceEventInfoEXT> { using Type = DeviceEventInfoEXT; };

struct DeviceGroupBindSparseInfo
{
  using NativeType = VkDeviceGroupBindSparseInfo;
static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), resourceDeviceIndex(resourceDeviceIndex_), memoryDeviceIndex(memoryDeviceIndex_) { } VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(DeviceGroupBindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupBindSparseInfo(VkDeviceGroupBindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupBindSparseInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupBindSparseInfo &operator=(DeviceGroupBindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupBindSparseInfo &operator=(VkDeviceGroupBindSparseInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo &setResourceDeviceIndex(uint32_t resourceDeviceIndex_) VULKAN_HPP_NOEXCEPT { resourceDeviceIndex = resourceDeviceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo &setMemoryDeviceIndex(uint32_t memoryDeviceIndex_) VULKAN_HPP_NOEXCEPT { memoryDeviceIndex = memoryDeviceIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, resourceDeviceIndex, memoryDeviceIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupBindSparseInfo const &) const = default; #else bool operator==(DeviceGroupBindSparseInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (resourceDeviceIndex == rhs.resourceDeviceIndex) && (memoryDeviceIndex == rhs.memoryDeviceIndex); # endif } bool operator!=(DeviceGroupBindSparseInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; const void *pNext = {}; uint32_t resourceDeviceIndex = {}; uint32_t memoryDeviceIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo) == sizeof(VkDeviceGroupBindSparseInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupBindSparseInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupBindSparseInfo; }; using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; struct DeviceGroupCommandBufferBeginInfo { using NativeType = VkDeviceGroupCommandBufferBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eDeviceGroupCommandBufferBeginInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceMask(deviceMask_) { } VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(DeviceGroupCommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupCommandBufferBeginInfo(VkDeviceGroupCommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupCommandBufferBeginInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupCommandBufferBeginInfo &operator=(DeviceGroupCommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupCommandBufferBeginInfo &operator=(VkDeviceGroupCommandBufferBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo &setDeviceMask(uint32_t deviceMask_) VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupCommandBufferBeginInfo const &) const = default; #else bool operator==(DeviceGroupCommandBufferBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceMask == rhs.deviceMask); # endif } bool operator!=(DeviceGroupCommandBufferBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; const void *pNext = {}; uint32_t deviceMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo) == sizeof(VkDeviceGroupCommandBufferBeginInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupCommandBufferBeginInfo; }; using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; struct DeviceGroupDeviceCreateInfo { using NativeType = VkDeviceGroupDeviceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice *pPhysicalDevices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), physicalDeviceCount(physicalDeviceCount_), 
pPhysicalDevices(pPhysicalDevices_) { } VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(DeviceGroupDeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupDeviceCreateInfo(VkDeviceGroupDeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupDeviceCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupDeviceCreateInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &physicalDevices_, const void *pNext_ = nullptr) : pNext(pNext_) , physicalDeviceCount(static_cast(physicalDevices_.size())) , pPhysicalDevices(physicalDevices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupDeviceCreateInfo &operator=(DeviceGroupDeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupDeviceCreateInfo &operator=(VkDeviceGroupDeviceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &setPhysicalDeviceCount(uint32_t physicalDeviceCount_) VULKAN_HPP_NOEXCEPT { physicalDeviceCount = physicalDeviceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPPhysicalDevices(const VULKAN_HPP_NAMESPACE::PhysicalDevice *pPhysicalDevices_) VULKAN_HPP_NOEXCEPT { pPhysicalDevices = pPhysicalDevices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupDeviceCreateInfo &setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &physicalDevices_) VULKAN_HPP_NOEXCEPT { physicalDeviceCount = static_cast(physicalDevices_.size()); pPhysicalDevices = physicalDevices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, physicalDeviceCount, pPhysicalDevices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupDeviceCreateInfo const &) const = default; #else bool operator==(DeviceGroupDeviceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (physicalDeviceCount == rhs.physicalDeviceCount) && (pPhysicalDevices == rhs.pPhysicalDevices); # endif } bool operator!=(DeviceGroupDeviceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; const void *pNext = {}; uint32_t physicalDeviceCount = {}; const VULKAN_HPP_NAMESPACE::PhysicalDevice *pPhysicalDevices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo) == sizeof(VkDeviceGroupDeviceCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupDeviceCreateInfo is 
not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupDeviceCreateInfo; }; using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; struct DeviceGroupPresentCapabilitiesKHR { using NativeType = VkDeviceGroupPresentCapabilitiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array const &presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), presentMask(presentMask_), modes(modes_) { } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(DeviceGroupPresentCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupPresentCapabilitiesKHR(VkDeviceGroupPresentCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupPresentCapabilitiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupPresentCapabilitiesKHR &operator=(DeviceGroupPresentCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupPresentCapabilitiesKHR &operator=(VkDeviceGroupPresentCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, presentMask, modes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupPresentCapabilitiesKHR const &) const = default; #else bool operator==(DeviceGroupPresentCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (presentMask == rhs.presentMask) && (modes == rhs.modes); # endif } bool operator!=(DeviceGroupPresentCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR) == sizeof(VkDeviceGroupPresentCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupPresentCapabilitiesKHR; }; struct DeviceGroupPresentInfoKHR { using NativeType = VkDeviceGroupPresentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, 
const uint32_t *pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchainCount(swapchainCount_), pDeviceMasks(pDeviceMasks_), mode(mode_) { } VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(DeviceGroupPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupPresentInfoKHR(VkDeviceGroupPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupPresentInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupPresentInfoKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void *pNext_ = nullptr) : pNext(pNext_) , swapchainCount(static_cast(deviceMasks_.size())) , pDeviceMasks(deviceMasks_.data()) , mode(mode_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupPresentInfoKHR &operator=(DeviceGroupPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupPresentInfoKHR &operator=(VkDeviceGroupPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &setSwapchainCount(uint32_t swapchainCount_) VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &setPDeviceMasks(const uint32_t *pDeviceMasks_) VULKAN_HPP_NOEXCEPT { pDeviceMasks = pDeviceMasks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupPresentInfoKHR &setDeviceMasks(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceMasks_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast(deviceMasks_.size()); pDeviceMasks = deviceMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &setMode(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, swapchainCount, pDeviceMasks, mode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupPresentInfoKHR const &) const = default; #else bool operator==(DeviceGroupPresentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchainCount == rhs.swapchainCount) && (pDeviceMasks == rhs.pDeviceMasks) && (mode == rhs.mode); # endif } bool operator!=(DeviceGroupPresentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; const void *pNext = {}; uint32_t swapchainCount = {}; const 
uint32_t *pDeviceMasks = {}; VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR) == sizeof(VkDeviceGroupPresentInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupPresentInfoKHR; }; struct DeviceGroupRenderPassBeginInfo { using NativeType = VkDeviceGroupRenderPassBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D *pDeviceRenderAreas_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceMask(deviceMask_), deviceRenderAreaCount(deviceRenderAreaCount_), pDeviceRenderAreas(pDeviceRenderAreas_) { } VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(DeviceGroupRenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupRenderPassBeginInfo(VkDeviceGroupRenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupRenderPassBeginInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceRenderAreas_, const void *pNext_ = nullptr) : pNext(pNext_) , deviceMask(deviceMask_) , deviceRenderAreaCount(static_cast(deviceRenderAreas_.size())) , pDeviceRenderAreas(deviceRenderAreas_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupRenderPassBeginInfo &operator=(DeviceGroupRenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupRenderPassBeginInfo &operator=(VkDeviceGroupRenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &setDeviceMask(uint32_t deviceMask_) VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &setDeviceRenderAreaCount(uint32_t deviceRenderAreaCount_) VULKAN_HPP_NOEXCEPT { deviceRenderAreaCount = deviceRenderAreaCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas(const VULKAN_HPP_NAMESPACE::Rect2D *pDeviceRenderAreas_) VULKAN_HPP_NOEXCEPT { pDeviceRenderAreas = pDeviceRenderAreas_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &deviceRenderAreas_) VULKAN_HPP_NOEXCEPT { deviceRenderAreaCount = static_cast(deviceRenderAreas_.size()); pDeviceRenderAreas = deviceRenderAreas_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { 
return *reinterpret_cast(this); } explicit operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupRenderPassBeginInfo const &) const = default; #else bool operator==(DeviceGroupRenderPassBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceMask == rhs.deviceMask) && (deviceRenderAreaCount == rhs.deviceRenderAreaCount) && (pDeviceRenderAreas == rhs.pDeviceRenderAreas); # endif } bool operator!=(DeviceGroupRenderPassBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; const void *pNext = {}; uint32_t deviceMask = {}; uint32_t deviceRenderAreaCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D *pDeviceRenderAreas = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo) == sizeof(VkDeviceGroupRenderPassBeginInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupRenderPassBeginInfo; }; using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; struct DeviceGroupSubmitInfo { using NativeType = VkDeviceGroupSubmitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t *pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t *pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t *pSignalSemaphoreDeviceIndices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), waitSemaphoreCount(waitSemaphoreCount_), pWaitSemaphoreDeviceIndices(pWaitSemaphoreDeviceIndices_), commandBufferCount(commandBufferCount_), pCommandBufferDeviceMasks(pCommandBufferDeviceMasks_), signalSemaphoreCount(signalSemaphoreCount_), pSignalSemaphoreDeviceIndices(pSignalSemaphoreDeviceIndices_) { } VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(DeviceGroupSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupSubmitInfo(VkDeviceGroupSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupSubmitInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupSubmitInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreDeviceIndices_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , waitSemaphoreCount(static_cast(waitSemaphoreDeviceIndices_.size())) , pWaitSemaphoreDeviceIndices(waitSemaphoreDeviceIndices_.data()) , 
commandBufferCount(static_cast(commandBufferDeviceMasks_.size())) , pCommandBufferDeviceMasks(commandBufferDeviceMasks_.data()) , signalSemaphoreCount(static_cast(signalSemaphoreDeviceIndices_.size())) , pSignalSemaphoreDeviceIndices(signalSemaphoreDeviceIndices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupSubmitInfo &operator=(DeviceGroupSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupSubmitInfo &operator=(VkDeviceGroupSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setWaitSemaphoreCount(uint32_t waitSemaphoreCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setPWaitSemaphoreDeviceIndices(const uint32_t *pWaitSemaphoreDeviceIndices_) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreDeviceIndices_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast(waitSemaphoreDeviceIndices_.size()); pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setCommandBufferCount(uint32_t commandBufferCount_) VULKAN_HPP_NOEXCEPT { commandBufferCount = commandBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setPCommandBufferDeviceMasks(const uint32_t *pCommandBufferDeviceMasks_) VULKAN_HPP_NOEXCEPT { pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupSubmitInfo & setCommandBufferDeviceMasks(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &commandBufferDeviceMasks_) VULKAN_HPP_NOEXCEPT { commandBufferCount = static_cast(commandBufferDeviceMasks_.size()); pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setSignalSemaphoreCount(uint32_t signalSemaphoreCount_) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &setPSignalSemaphoreDeviceIndices(const uint32_t *pSignalSemaphoreDeviceIndices_) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreDeviceIndices_) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = static_cast(signalSemaphoreDeviceIndices_.size()); pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # 
endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, waitSemaphoreCount, pWaitSemaphoreDeviceIndices, commandBufferCount, pCommandBufferDeviceMasks, signalSemaphoreCount, pSignalSemaphoreDeviceIndices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupSubmitInfo const &) const = default; #else bool operator==(DeviceGroupSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (waitSemaphoreCount == rhs.waitSemaphoreCount) && (pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices) && (commandBufferCount == rhs.commandBufferCount) && (pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks) && (signalSemaphoreCount == rhs.signalSemaphoreCount) && (pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices); # endif } bool operator!=(DeviceGroupSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; const void *pNext = {}; uint32_t waitSemaphoreCount = {}; const uint32_t *pWaitSemaphoreDeviceIndices = {}; uint32_t commandBufferCount = {}; const uint32_t *pCommandBufferDeviceMasks = {}; uint32_t signalSemaphoreCount = {}; const uint32_t *pSignalSemaphoreDeviceIndices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo) == sizeof(VkDeviceGroupSubmitInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupSubmitInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupSubmitInfo; }; using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; struct DeviceGroupSwapchainCreateInfoKHR { using NativeType = VkDeviceGroupSwapchainCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), modes(modes_) { } VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(DeviceGroupSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupSwapchainCreateInfoKHR(VkDeviceGroupSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : DeviceGroupSwapchainCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceGroupSwapchainCreateInfoKHR &operator=(DeviceGroupSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceGroupSwapchainCreateInfoKHR &operator=(VkDeviceGroupSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR &setModes(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_) VULKAN_HPP_NOEXCEPT { modes = modes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceGroupSwapchainCreateInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, modes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceGroupSwapchainCreateInfoKHR const &) const = default; #else bool operator==(DeviceGroupSwapchainCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (modes == rhs.modes); # endif } bool operator!=(DeviceGroupSwapchainCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR) == sizeof(VkDeviceGroupSwapchainCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceGroupSwapchainCreateInfoKHR; }; struct ImageCreateInfo { using NativeType = VkImageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t *pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), imageType(imageType_), format(format_), extent(extent_), mipLevels(mipLevels_), arrayLayers(arrayLayers_), samples(samples_), tiling(tiling_), usage(usage_), sharingMode(sharingMode_), queueFamilyIndexCount(queueFamilyIndexCount_), pQueueFamilyIndices(pQueueFamilyIndices_), initialLayout(initialLayout_) { } VULKAN_HPP_CONSTEXPR ImageCreateInfo(ImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageCreateInfo(VkImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ImageCreateInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t 
arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , imageType(imageType_) , format(format_) , extent(extent_) , mipLevels(mipLevels_) , arrayLayers(arrayLayers_) , samples(samples_) , tiling(tiling_) , usage(usage_) , sharingMode(sharingMode_) , queueFamilyIndexCount(static_cast(queueFamilyIndices_.size())) , pQueueFamilyIndices(queueFamilyIndices_.data()) , initialLayout(initialLayout_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageCreateInfo &operator=(ImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageCreateInfo &operator=(VkImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setImageType(VULKAN_HPP_NAMESPACE::ImageType imageType_) VULKAN_HPP_NOEXCEPT { imageType = imageType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setExtent(VULKAN_HPP_NAMESPACE::Extent3D const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setMipLevels(uint32_t mipLevels_) VULKAN_HPP_NOEXCEPT { mipLevels = mipLevels_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setArrayLayers(uint32_t arrayLayers_) VULKAN_HPP_NOEXCEPT { arrayLayers = arrayLayers_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setSamples(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_) VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setTiling(VULKAN_HPP_NAMESPACE::ImageTiling tiling_) VULKAN_HPP_NOEXCEPT { tiling = tiling_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setSharingMode(VULKAN_HPP_NAMESPACE::SharingMode sharingMode_) VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setQueueFamilyIndexCount(uint32_t queueFamilyIndexCount_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo &setPQueueFamilyIndices(const uint32_t *pQueueFamilyIndices_) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageCreateInfo &setQueueFamilyIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast(queueFamilyIndices_.size()); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo 
&setInitialLayout(VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_) VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, imageType, format, extent, mipLevels, arrayLayers, samples, tiling, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices, initialLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageCreateInfo const &) const = default; #else bool operator==(ImageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (imageType == rhs.imageType) && (format == rhs.format) && (extent == rhs.extent) && (mipLevels == rhs.mipLevels) && (arrayLayers == rhs.arrayLayers) && (samples == rhs.samples) && (tiling == rhs.tiling) && (usage == rhs.usage) && (sharingMode == rhs.sharingMode) && (queueFamilyIndexCount == rhs.queueFamilyIndexCount) && (pQueueFamilyIndices == rhs.pQueueFamilyIndices) && (initialLayout == rhs.initialLayout); # endif } bool operator!=(ImageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::Extent3D extent = {}; uint32_t mipLevels = {}; uint32_t arrayLayers = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t *pQueueFamilyIndices = {}; VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageCreateInfo) == sizeof(VkImageCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageCreateInfo; }; struct DeviceImageMemoryRequirements { using NativeType = VkDeviceImageMemoryRequirements; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements(const VULKAN_HPP_NAMESPACE::ImageCreateInfo *pCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
pCreateInfo(pCreateInfo_), planeAspect(planeAspect_) { } VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements(DeviceImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceImageMemoryRequirements(VkDeviceImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT : DeviceImageMemoryRequirements(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceImageMemoryRequirements &operator=(DeviceImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceImageMemoryRequirements &operator=(VkDeviceImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements &setPCreateInfo(const VULKAN_HPP_NAMESPACE::ImageCreateInfo *pCreateInfo_) VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements &setPlaneAspect(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_) VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pCreateInfo, planeAspect); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceImageMemoryRequirements const &) const = default; #else bool operator==(DeviceImageMemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pCreateInfo == rhs.pCreateInfo) && (planeAspect == rhs.planeAspect); # endif } bool operator!=(DeviceImageMemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements; const void *pNext = {}; const VULKAN_HPP_NAMESPACE::ImageCreateInfo *pCreateInfo = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements) == sizeof(VkDeviceImageMemoryRequirements), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceImageMemoryRequirements is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceImageMemoryRequirements; }; using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; struct DeviceMemoryOpaqueCaptureAddressInfo { using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void *pNext_ = 
nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_) { } VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(DeviceMemoryOpaqueCaptureAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryOpaqueCaptureAddressInfo(VkDeviceMemoryOpaqueCaptureAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT : DeviceMemoryOpaqueCaptureAddressInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceMemoryOpaqueCaptureAddressInfo &operator=(DeviceMemoryOpaqueCaptureAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryOpaqueCaptureAddressInfo &operator=(VkDeviceMemoryOpaqueCaptureAddressInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceMemoryOpaqueCaptureAddressInfo const &) const = default; #else bool operator==(DeviceMemoryOpaqueCaptureAddressInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memory == rhs.memory); # endif } bool operator!=(DeviceMemoryOpaqueCaptureAddressInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo) == sizeof(VkDeviceMemoryOpaqueCaptureAddressInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceMemoryOpaqueCaptureAddressInfo; }; using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; struct DeviceMemoryOverallocationCreateInfoAMD { using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), overallocationBehavior(overallocationBehavior_) { } VULKAN_HPP_CONSTEXPR 
DeviceMemoryOverallocationCreateInfoAMD(DeviceMemoryOverallocationCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryOverallocationCreateInfoAMD(VkDeviceMemoryOverallocationCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT : DeviceMemoryOverallocationCreateInfoAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceMemoryOverallocationCreateInfoAMD &operator=(DeviceMemoryOverallocationCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryOverallocationCreateInfoAMD &operator=(VkDeviceMemoryOverallocationCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_) VULKAN_HPP_NOEXCEPT { overallocationBehavior = overallocationBehavior_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, overallocationBehavior); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceMemoryOverallocationCreateInfoAMD const &) const = default; #else bool operator==(DeviceMemoryOverallocationCreateInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (overallocationBehavior == rhs.overallocationBehavior); # endif } bool operator!=(DeviceMemoryOverallocationCreateInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; const void *pNext = {}; VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD) == sizeof(VkDeviceMemoryOverallocationCreateInfoAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceMemoryOverallocationCreateInfoAMD; }; struct DeviceMemoryReportCallbackDataEXT { using NativeType = VkDeviceMemoryReportCallbackDataEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t 
memoryObjectId_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), type(type_), memoryObjectId(memoryObjectId_), size(size_), objectType(objectType_), objectHandle(objectHandle_), heapIndex(heapIndex_) { } VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(DeviceMemoryReportCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryReportCallbackDataEXT(VkDeviceMemoryReportCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT : DeviceMemoryReportCallbackDataEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceMemoryReportCallbackDataEXT &operator=(DeviceMemoryReportCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryReportCallbackDataEXT &operator=(VkDeviceMemoryReportCallbackDataEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceMemoryReportCallbackDataEXT const &) const = default; #else bool operator==(DeviceMemoryReportCallbackDataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (type == rhs.type) && (memoryObjectId == rhs.memoryObjectId) && (size == rhs.size) && (objectType == rhs.objectType) && (objectHandle == rhs.objectHandle) && (heapIndex == rhs.heapIndex); # endif } bool operator!=(DeviceMemoryReportCallbackDataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; uint64_t memoryObjectId = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; uint64_t objectHandle = {}; uint32_t heapIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT) == sizeof(VkDeviceMemoryReportCallbackDataEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceMemoryReportCallbackDataEXT; }; struct DevicePrivateDataCreateInfo { using NativeType = VkDevicePrivateDataCreateInfo; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo(uint32_t privateDataSlotRequestCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), privateDataSlotRequestCount(privateDataSlotRequestCount_) { } VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo(DevicePrivateDataCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DevicePrivateDataCreateInfo(VkDevicePrivateDataCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : DevicePrivateDataCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DevicePrivateDataCreateInfo &operator=(DevicePrivateDataCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; DevicePrivateDataCreateInfo &operator=(VkDevicePrivateDataCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo &setPrivateDataSlotRequestCount(uint32_t privateDataSlotRequestCount_) VULKAN_HPP_NOEXCEPT { privateDataSlotRequestCount = privateDataSlotRequestCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, privateDataSlotRequestCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DevicePrivateDataCreateInfo const &) const = default; #else bool operator==(DevicePrivateDataCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (privateDataSlotRequestCount == rhs.privateDataSlotRequestCount); # endif } bool operator!=(DevicePrivateDataCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo; const void *pNext = {}; uint32_t privateDataSlotRequestCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo) == sizeof(VkDevicePrivateDataCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DevicePrivateDataCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = DevicePrivateDataCreateInfo; }; using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; struct DeviceQueueGlobalPriorityCreateInfoKHR { using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
globalPriority(globalPriority_) { } VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR(DeviceQueueGlobalPriorityCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceQueueGlobalPriorityCreateInfoKHR(VkDeviceQueueGlobalPriorityCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : DeviceQueueGlobalPriorityCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceQueueGlobalPriorityCreateInfoKHR &operator=(DeviceQueueGlobalPriorityCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceQueueGlobalPriorityCreateInfoKHR &operator=(VkDeviceQueueGlobalPriorityCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setGlobalPriority(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_) VULKAN_HPP_NOEXCEPT { globalPriority = globalPriority_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, globalPriority); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceQueueGlobalPriorityCreateInfoKHR const &) const = default; #else bool operator==(DeviceQueueGlobalPriorityCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (globalPriority == rhs.globalPriority); # endif } bool operator!=(DeviceQueueGlobalPriorityCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR) == sizeof(VkDeviceQueueGlobalPriorityCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceQueueGlobalPriorityCreateInfoKHR; }; using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; struct DeviceQueueInfo2 { using NativeType = VkDeviceQueueInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), queueFamilyIndex(queueFamilyIndex_), 
queueIndex(queueIndex_) { } VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(DeviceQueueInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceQueueInfo2(VkDeviceQueueInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DeviceQueueInfo2 &operator=(DeviceQueueInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; DeviceQueueInfo2 &operator=(VkDeviceQueueInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 &setFlags(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 &setQueueFamilyIndex(uint32_t queueFamilyIndex_) VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 &setQueueIndex(uint32_t queueIndex_) VULKAN_HPP_NOEXCEPT { queueIndex = queueIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, queueFamilyIndex, queueIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DeviceQueueInfo2 const &) const = default; #else bool operator==(DeviceQueueInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (queueFamilyIndex == rhs.queueFamilyIndex) && (queueIndex == rhs.queueIndex); # endif } bool operator!=(DeviceQueueInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; uint32_t queueIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DeviceQueueInfo2) == sizeof(VkDeviceQueueInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DeviceQueueInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = DeviceQueueInfo2; }; #if defined(VK_USE_PLATFORM_DIRECTFB_EXT) struct DirectFBSurfaceCreateInfoEXT { using NativeType = VkDirectFBSurfaceCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB *dfb_ = {}, IDirectFBSurface *surface_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), dfb(dfb_), surface(surface_) { } VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(DirectFBSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; 
DirectFBSurfaceCreateInfoEXT(VkDirectFBSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DirectFBSurfaceCreateInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DirectFBSurfaceCreateInfoEXT &operator=(DirectFBSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DirectFBSurfaceCreateInfoEXT &operator=(VkDirectFBSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT &setDfb(IDirectFB *dfb_) VULKAN_HPP_NOEXCEPT { dfb = dfb_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT &setSurface(IDirectFBSurface *surface_) VULKAN_HPP_NOEXCEPT { surface = surface_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, dfb, surface); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DirectFBSurfaceCreateInfoEXT const &) const = default; # else bool operator==(DirectFBSurfaceCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (dfb == rhs.dfb) && (surface == rhs.surface); # endif } bool operator!=(DirectFBSurfaceCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; IDirectFB *dfb = {}; IDirectFBSurface *surface = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT) == sizeof(VkDirectFBSurfaceCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DirectFBSurfaceCreateInfoEXT; }; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ struct DispatchIndirectCommand { using NativeType = VkDispatchIndirectCommand; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT : x(x_), y(y_), z(z_) { } VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(DispatchIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT = default; DispatchIndirectCommand(VkDispatchIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT : DispatchIndirectCommand(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DispatchIndirectCommand &operator=(DispatchIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT = 
default; DispatchIndirectCommand &operator=(VkDispatchIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand &setX(uint32_t x_) VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand &setY(uint32_t y_) VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand &setZ(uint32_t z_) VULKAN_HPP_NOEXCEPT { z = z_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(x, y, z); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DispatchIndirectCommand const &) const = default; #else bool operator==(DispatchIndirectCommand const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (x == rhs.x) && (y == rhs.y) && (z == rhs.z); # endif } bool operator!=(DispatchIndirectCommand const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t x = {}; uint32_t y = {}; uint32_t z = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DispatchIndirectCommand) == sizeof(VkDispatchIndirectCommand), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DispatchIndirectCommand is not nothrow_move_constructible!"); struct DisplayEventInfoEXT { using NativeType = VkDisplayEventInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), displayEvent(displayEvent_) { } VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(DisplayEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayEventInfoEXT(VkDisplayEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayEventInfoEXT &operator=(DisplayEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayEventInfoEXT &operator=(VkDisplayEventInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT &setDisplayEvent(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_) VULKAN_HPP_NOEXCEPT { displayEvent = displayEvent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, displayEvent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayEventInfoEXT const &) const = default; #else bool operator==(DisplayEventInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (displayEvent == rhs.displayEvent); # endif } bool operator!=(DisplayEventInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT) == sizeof(VkDisplayEventInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayEventInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayEventInfoEXT; }; struct DisplayModeParametersKHR { using NativeType = VkDisplayModeParametersKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT : visibleRegion(visibleRegion_), refreshRate(refreshRate_) { } VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(DisplayModeParametersKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModeParametersKHR(VkDisplayModeParametersKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayModeParametersKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayModeParametersKHR &operator=(DisplayModeParametersKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModeParametersKHR &operator=(VkDisplayModeParametersKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR &setVisibleRegion(VULKAN_HPP_NAMESPACE::Extent2D const &visibleRegion_) VULKAN_HPP_NOEXCEPT { visibleRegion = visibleRegion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR &setRefreshRate(uint32_t refreshRate_) VULKAN_HPP_NOEXCEPT { refreshRate = refreshRate_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(visibleRegion, refreshRate); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayModeParametersKHR const &) const = default; #else bool operator==(DisplayModeParametersKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (visibleRegion == rhs.visibleRegion) && (refreshRate == rhs.refreshRate); # endif } bool operator!=(DisplayModeParametersKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; uint32_t refreshRate = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR) == sizeof(VkDisplayModeParametersKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayModeParametersKHR is not nothrow_move_constructible!"); struct DisplayModeCreateInfoKHR { using NativeType = VkDisplayModeCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), parameters(parameters_) { } VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(DisplayModeCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModeCreateInfoKHR(VkDisplayModeCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayModeCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayModeCreateInfoKHR &operator=(DisplayModeCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModeCreateInfoKHR &operator=(VkDisplayModeCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR &setParameters(VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &parameters_) VULKAN_HPP_NOEXCEPT { parameters = parameters_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, parameters); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayModeCreateInfoKHR const &) const = default; #else bool operator==(DisplayModeCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (parameters == rhs.parameters); # endif } bool operator!=(DisplayModeCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR) == sizeof(VkDisplayModeCreateInfoKHR), "struct and wrapper have different size!");
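// Illustrative usage sketch (not part of the generated registry output): DisplayModeCreateInfoKHR is
// normally filled through its chained setters and passed to PhysicalDevice::createDisplayModeKHR from
// VK_KHR_display. The 1920x1080 visible region, the 60000 mHz (60 Hz) refresh rate, and the
// `physicalDevice` / `display` handles below are assumptions for illustration only.
//
//   VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters( VULKAN_HPP_NAMESPACE::Extent2D( 1920, 1080 ), 60000 );  // refresh rate is in millihertz
//   VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR createInfo;
//   createInfo.setParameters( parameters );
//   VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, createInfo );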
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayModeCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayModeCreateInfoKHR; }; struct DisplayModePropertiesKHR { using NativeType = VkDisplayModePropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT : displayMode(displayMode_), parameters(parameters_) { } VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(DisplayModePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModePropertiesKHR(VkDisplayModePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayModePropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayModePropertiesKHR &operator=(DisplayModePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModePropertiesKHR &operator=(VkDisplayModePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(displayMode, parameters); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayModePropertiesKHR const &) const = default; #else bool operator==(DisplayModePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (displayMode == rhs.displayMode) && (parameters == rhs.parameters); # endif } bool operator!=(DisplayModePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR) == sizeof(VkDisplayModePropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayModePropertiesKHR is not nothrow_move_constructible!"); struct DisplayModeProperties2KHR { using NativeType = VkDisplayModeProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), displayModeProperties(displayModeProperties_) { } VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(DisplayModeProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayModeProperties2KHR(VkDisplayModeProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayModeProperties2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayModeProperties2KHR &operator=(DisplayModeProperties2KHR const &rhs) 
VULKAN_HPP_NOEXCEPT = default; DisplayModeProperties2KHR &operator=(VkDisplayModeProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, displayModeProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayModeProperties2KHR const &) const = default; #else bool operator==(DisplayModeProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (displayModeProperties == rhs.displayModeProperties); # endif } bool operator!=(DisplayModeProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR) == sizeof(VkDisplayModeProperties2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayModeProperties2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayModeProperties2KHR; }; struct DisplayNativeHdrSurfaceCapabilitiesAMD { using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), localDimmingSupport(localDimmingSupport_) { } VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(DisplayNativeHdrSurfaceCapabilitiesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayNativeHdrSurfaceCapabilitiesAMD(VkDisplayNativeHdrSurfaceCapabilitiesAMD const &rhs) VULKAN_HPP_NOEXCEPT : DisplayNativeHdrSurfaceCapabilitiesAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayNativeHdrSurfaceCapabilitiesAMD &operator=(DisplayNativeHdrSurfaceCapabilitiesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayNativeHdrSurfaceCapabilitiesAMD &operator=(VkDisplayNativeHdrSurfaceCapabilitiesAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, localDimmingSupport); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(DisplayNativeHdrSurfaceCapabilitiesAMD const &) const = default; #else bool operator==(DisplayNativeHdrSurfaceCapabilitiesAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (localDimmingSupport == rhs.localDimmingSupport); # endif } bool operator!=(DisplayNativeHdrSurfaceCapabilitiesAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD) == sizeof(VkDisplayNativeHdrSurfaceCapabilitiesAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; }; struct DisplayPlaneCapabilitiesKHR { using NativeType = VkDisplayPlaneCapabilitiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT : supportedAlpha(supportedAlpha_), minSrcPosition(minSrcPosition_), maxSrcPosition(maxSrcPosition_), minSrcExtent(minSrcExtent_), maxSrcExtent(maxSrcExtent_), minDstPosition(minDstPosition_), maxDstPosition(maxDstPosition_), minDstExtent(minDstExtent_), maxDstExtent(maxDstExtent_) { } VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(DisplayPlaneCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneCapabilitiesKHR(VkDisplayPlaneCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPlaneCapabilitiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPlaneCapabilitiesKHR &operator=(DisplayPlaneCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneCapabilitiesKHR &operator=(VkDisplayPlaneCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPlaneCapabilitiesKHR const &) const = default; #else bool operator==(DisplayPlaneCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return 
this->reflect() == rhs.reflect(); # else return (supportedAlpha == rhs.supportedAlpha) && (minSrcPosition == rhs.minSrcPosition) && (maxSrcPosition == rhs.maxSrcPosition) && (minSrcExtent == rhs.minSrcExtent) && (maxSrcExtent == rhs.maxSrcExtent) && (minDstPosition == rhs.minDstPosition) && (maxDstPosition == rhs.maxDstPosition) && (minDstExtent == rhs.minDstExtent) && (maxDstExtent == rhs.maxDstExtent); # endif } bool operator!=(DisplayPlaneCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR) == sizeof(VkDisplayPlaneCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!"); struct DisplayPlaneCapabilities2KHR { using NativeType = VkDisplayPlaneCapabilities2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), capabilities(capabilities_) { } VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(DisplayPlaneCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneCapabilities2KHR(VkDisplayPlaneCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPlaneCapabilities2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPlaneCapabilities2KHR &operator=(DisplayPlaneCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneCapabilities2KHR &operator=(VkDisplayPlaneCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, capabilities); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPlaneCapabilities2KHR const &) const = default; #else bool operator==(DisplayPlaneCapabilities2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (capabilities == rhs.capabilities); # endif } bool operator!=(DisplayPlaneCapabilities2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eDisplayPlaneCapabilities2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR) == sizeof(VkDisplayPlaneCapabilities2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayPlaneCapabilities2KHR; }; struct DisplayPlaneInfo2KHR { using NativeType = VkDisplayPlaneInfo2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), mode(mode_), planeIndex(planeIndex_) { } VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(DisplayPlaneInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneInfo2KHR(VkDisplayPlaneInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPlaneInfo2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPlaneInfo2KHR &operator=(DisplayPlaneInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneInfo2KHR &operator=(VkDisplayPlaneInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR &setMode(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR &setPlaneIndex(uint32_t planeIndex_) VULKAN_HPP_NOEXCEPT { planeIndex = planeIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, mode, planeIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPlaneInfo2KHR const &) const = default; #else bool operator==(DisplayPlaneInfo2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (mode == rhs.mode) && (planeIndex == rhs.planeIndex); # endif } bool operator!=(DisplayPlaneInfo2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; uint32_t planeIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR) == sizeof(VkDisplayPlaneInfo2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
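// Illustrative usage sketch (not part of the generated registry output): DisplayPlaneInfo2KHR is the
// query input for the VK_KHR_get_display_properties2 path. The `physicalDevice` and `mode` handles and
// the plane index 0 below are placeholders.
//
//   VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR planeInfo;
//   planeInfo.setMode( mode ).setPlaneIndex( 0 );
//   VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities = physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );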
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPlaneInfo2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayPlaneInfo2KHR; }; struct DisplayPlanePropertiesKHR { using NativeType = VkDisplayPlanePropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT : currentDisplay(currentDisplay_), currentStackIndex(currentStackIndex_) { } VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(DisplayPlanePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlanePropertiesKHR(VkDisplayPlanePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPlanePropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPlanePropertiesKHR &operator=(DisplayPlanePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlanePropertiesKHR &operator=(VkDisplayPlanePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(currentDisplay, currentStackIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPlanePropertiesKHR const &) const = default; #else bool operator==(DisplayPlanePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (currentDisplay == rhs.currentDisplay) && (currentStackIndex == rhs.currentStackIndex); # endif } bool operator!=(DisplayPlanePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; uint32_t currentStackIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR) == sizeof(VkDisplayPlanePropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPlanePropertiesKHR is not nothrow_move_constructible!"); struct DisplayPlaneProperties2KHR { using NativeType = VkDisplayPlaneProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), displayPlaneProperties(displayPlaneProperties_) { } VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(DisplayPlaneProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneProperties2KHR(VkDisplayPlaneProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPlaneProperties2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPlaneProperties2KHR &operator=(DisplayPlaneProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneProperties2KHR 
&operator=(VkDisplayPlaneProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, displayPlaneProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPlaneProperties2KHR const &) const = default; #else bool operator==(DisplayPlaneProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (displayPlaneProperties == rhs.displayPlaneProperties); # endif } bool operator!=(DisplayPlaneProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR) == sizeof(VkDisplayPlaneProperties2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPlaneProperties2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayPlaneProperties2KHR; }; struct DisplayPowerInfoEXT { using NativeType = VkDisplayPowerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), powerState(powerState_) { } VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(DisplayPowerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPowerInfoEXT(VkDisplayPowerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPowerInfoEXT &operator=(DisplayPowerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPowerInfoEXT &operator=(VkDisplayPowerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT &setPowerState(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_) VULKAN_HPP_NOEXCEPT { powerState = powerState_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, powerState); } #endif 
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPowerInfoEXT const &) const = default; #else bool operator==(DisplayPowerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (powerState == rhs.powerState); # endif } bool operator!=(DisplayPowerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT) == sizeof(VkDisplayPowerInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPowerInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayPowerInfoEXT; }; struct DisplayPresentInfoKHR { using NativeType = VkDisplayPresentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcRect(srcRect_), dstRect(dstRect_), persistent(persistent_) { } VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(DisplayPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPresentInfoKHR(VkDisplayPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPresentInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPresentInfoKHR &operator=(DisplayPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPresentInfoKHR &operator=(VkDisplayPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &setSrcRect(VULKAN_HPP_NAMESPACE::Rect2D const &srcRect_) VULKAN_HPP_NOEXCEPT { srcRect = srcRect_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &setDstRect(VULKAN_HPP_NAMESPACE::Rect2D const &dstRect_) VULKAN_HPP_NOEXCEPT { dstRect = dstRect_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &setPersistent(VULKAN_HPP_NAMESPACE::Bool32 persistent_) VULKAN_HPP_NOEXCEPT { persistent = persistent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcRect, dstRect, persistent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayPresentInfoKHR const &) const = default; #else bool operator==(DisplayPresentInfoKHR const &rhs) 
const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcRect == rhs.srcRect) && (dstRect == rhs.dstRect) && (persistent == rhs.persistent); # endif } bool operator!=(DisplayPresentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR) == sizeof(VkDisplayPresentInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPresentInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayPresentInfoKHR; }; struct DisplayPropertiesKHR { using NativeType = VkDisplayPropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char *displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT : display(display_), displayName(displayName_), physicalDimensions(physicalDimensions_), physicalResolution(physicalResolution_), supportedTransforms(supportedTransforms_), planeReorderPossible(planeReorderPossible_), persistentContent(persistentContent_) { } VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(DisplayPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPropertiesKHR(VkDisplayPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayPropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayPropertiesKHR &operator=(DisplayPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayPropertiesKHR &operator=(VkDisplayPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(DisplayPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = display <=> rhs.display; cmp != 0) return cmp; if(displayName != rhs.displayName) if(auto cmp = strcmp(displayName, rhs.displayName); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0) return cmp; if(auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0) return cmp; if(auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0) return cmp; if(auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0) return cmp; if(auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==(DisplayPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return (display == rhs.display) && ((displayName == rhs.displayName) || (strcmp(displayName, rhs.displayName) == 0)) && (physicalDimensions == rhs.physicalDimensions) && (physicalResolution == rhs.physicalResolution) && (supportedTransforms == rhs.supportedTransforms) && (planeReorderPossible == rhs.planeReorderPossible) && (persistentContent == rhs.persistentContent); } bool operator!=(DisplayPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; const char *displayName = {}; VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR) == sizeof(VkDisplayPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayPropertiesKHR is not nothrow_move_constructible!"); struct DisplayProperties2KHR { using NativeType = VkDisplayProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), displayProperties(displayProperties_) { } VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(DisplayProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayProperties2KHR(VkDisplayProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplayProperties2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplayProperties2KHR &operator=(DisplayProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplayProperties2KHR &operator=(VkDisplayProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, displayProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DisplayProperties2KHR const &) const = default; #else bool operator==(DisplayProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else 
return (sType == rhs.sType) && (pNext == rhs.pNext) && (displayProperties == rhs.displayProperties); # endif } bool operator!=(DisplayProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplayProperties2KHR) == sizeof(VkDisplayProperties2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DisplayProperties2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = DisplayProperties2KHR; }; struct DisplaySurfaceCreateInfoKHR { using NativeType = VkDisplaySurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), displayMode(displayMode_), planeIndex(planeIndex_), planeStackIndex(planeStackIndex_), transform(transform_), globalAlpha(globalAlpha_), alphaMode(alphaMode_), imageExtent(imageExtent_) { } VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(DisplaySurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplaySurfaceCreateInfoKHR(VkDisplaySurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : DisplaySurfaceCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DisplaySurfaceCreateInfoKHR &operator=(DisplaySurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; DisplaySurfaceCreateInfoKHR &operator=(VkDisplaySurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setDisplayMode(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_) VULKAN_HPP_NOEXCEPT { displayMode = displayMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setPlaneIndex(uint32_t planeIndex_) VULKAN_HPP_NOEXCEPT { planeIndex = planeIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setPlaneStackIndex(uint32_t planeStackIndex_) VULKAN_HPP_NOEXCEPT { planeStackIndex = planeStackIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setTransform(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_) 
VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setGlobalAlpha(float globalAlpha_) VULKAN_HPP_NOEXCEPT { globalAlpha = globalAlpha_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setAlphaMode(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_) VULKAN_HPP_NOEXCEPT { alphaMode = alphaMode_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &setImageExtent(VULKAN_HPP_NAMESPACE::Extent2D const &imageExtent_) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>(this); }
    explicit operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, float const &, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(DisplaySurfaceCreateInfoKHR const &) const = default;
#else
    bool operator==(DisplaySurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (displayMode == rhs.displayMode) && (planeIndex == rhs.planeIndex) && (planeStackIndex == rhs.planeStackIndex) && (transform == rhs.transform) && (globalAlpha == rhs.globalAlpha) && (alphaMode == rhs.alphaMode) && (imageExtent == rhs.imageExtent);
# endif
    }
    bool operator!=(DisplaySurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
    uint32_t planeIndex = {};
    uint32_t planeStackIndex = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    float globalAlpha = {};
    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR) == sizeof(VkDisplaySurfaceCreateInfoKHR), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
  {
    using Type = DisplaySurfaceCreateInfoKHR;
  };
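  // Illustrative usage sketch (not generated from the registry): a DisplaySurfaceCreateInfoKHR is
  // typically filled via the fluent setters above and passed to Instance::createDisplayPlaneSurfaceKHR
  // (shown here with exceptions enabled).  The displayMode, planeIndex, planeStackIndex and extent
  // values are hypothetical results of the VK_KHR_display property queries:
  //
  //   vk::DisplaySurfaceCreateInfoKHR surfaceCreateInfo = vk::DisplaySurfaceCreateInfoKHR{}
  //                                                         .setDisplayMode( displayMode )
  //                                                         .setPlaneIndex( planeIndex )
  //                                                         .setPlaneStackIndex( planeStackIndex )
  //                                                         .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
  //                                                         .setGlobalAlpha( 1.0f )
  //                                                         .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
  //                                                         .setImageExtent( extent );
  //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( surfaceCreateInfo );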
  struct DrawIndexedIndirectCommand
  {
    using NativeType = VkDrawIndexedIndirectCommand;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
      : indexCount(indexCount_), instanceCount(instanceCount_), firstIndex(firstIndex_), vertexOffset(vertexOffset_), firstInstance(firstInstance_)
    {
    }
    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(DrawIndexedIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrawIndexedIndirectCommand(VkDrawIndexedIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT : DrawIndexedIndirectCommand(*reinterpret_cast<DrawIndexedIndirectCommand const *>(&rhs)) { }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
    DrawIndexedIndirectCommand &operator=(DrawIndexedIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrawIndexedIndirectCommand &operator=(VkDrawIndexedIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>(&rhs); return *this; }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand &setIndexCount(uint32_t indexCount_) VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand &setInstanceCount(uint32_t instanceCount_) VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand &setFirstIndex(uint32_t firstIndex_) VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand &setVertexOffset(int32_t vertexOffset_) VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand &setFirstInstance(uint32_t firstInstance_) VULKAN_HPP_NOEXCEPT { firstInstance = firstInstance_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDrawIndexedIndirectCommand *>(this); }
    explicit operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDrawIndexedIndirectCommand *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, int32_t const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(DrawIndexedIndirectCommand const &) const = default;
#else
    bool operator==(DrawIndexedIndirectCommand const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (indexCount == rhs.indexCount) && (instanceCount == rhs.instanceCount) && (firstIndex == rhs.firstIndex) && (vertexOffset == rhs.vertexOffset) && (firstInstance == rhs.firstInstance);
# endif
    }
    bool operator!=(DrawIndexedIndirectCommand const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    uint32_t indexCount = {};
    uint32_t instanceCount = {};
    uint32_t firstIndex = {};
    int32_t vertexOffset = {};
    uint32_t firstInstance = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand) == sizeof(VkDrawIndexedIndirectCommand), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "DrawIndexedIndirectCommand is not nothrow_move_constructible!");
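  // Illustrative usage sketch (not generated from the registry): DrawIndexedIndirectCommand (and
  // DrawIndirectCommand below) mirror the host-side layout that vkCmdDrawIndexedIndirect /
  // vkCmdDrawIndirect read from a buffer.  `mapped`, `indirectBuffer` and `cmd` are hypothetical,
  // already-created objects:
  //
  //   vk::DrawIndexedIndirectCommand drawCmd( /*indexCount=*/36, /*instanceCount=*/1,
  //                                           /*firstIndex=*/0, /*vertexOffset=*/0, /*firstInstance=*/0 );
  //   memcpy( mapped, &drawCmd, sizeof( drawCmd ) );  // write into the mapped indirect buffer allocation
  //   cmd.drawIndexedIndirect( indirectBuffer, /*offset=*/0, /*drawCount=*/1, sizeof( vk::DrawIndexedIndirectCommand ) );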
  struct DrawIndirectCommand
  {
    using NativeType = VkDrawIndirectCommand;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
      : vertexCount(vertexCount_), instanceCount(instanceCount_), firstVertex(firstVertex_), firstInstance(firstInstance_)
    {
    }
    VULKAN_HPP_CONSTEXPR DrawIndirectCommand(DrawIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrawIndirectCommand(VkDrawIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand(*reinterpret_cast<DrawIndirectCommand const *>(&rhs)) { }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
    DrawIndirectCommand &operator=(DrawIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrawIndirectCommand &operator=(VkDrawIndirectCommand const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>(&rhs); return *this; }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand &setVertexCount(uint32_t vertexCount_) VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand &setInstanceCount(uint32_t instanceCount_) VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand &setFirstVertex(uint32_t firstVertex_) VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand &setFirstInstance(uint32_t firstInstance_) VULKAN_HPP_NOEXCEPT { firstInstance = firstInstance_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDrawIndirectCommand *>(this); }
    explicit operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDrawIndirectCommand *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(vertexCount, instanceCount, firstVertex, firstInstance); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(DrawIndirectCommand const &) const = default;
#else
    bool operator==(DrawIndirectCommand const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (vertexCount == rhs.vertexCount) && (instanceCount == rhs.instanceCount) && (firstVertex == rhs.firstVertex) && (firstInstance == rhs.firstInstance);
# endif
    }
    bool operator!=(DrawIndirectCommand const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    uint32_t vertexCount = {};
    uint32_t instanceCount = {};
    uint32_t firstVertex = {};
    uint32_t firstInstance = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrawIndirectCommand) == sizeof(VkDrawIndirectCommand), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "DrawIndirectCommand is not nothrow_move_constructible!");

  struct DrawMeshTasksIndirectCommandNV
  {
    using NativeType = VkDrawMeshTasksIndirectCommandNV;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT : taskCount(taskCount_), firstTask(firstTask_) { }
    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(DrawMeshTasksIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrawMeshTasksIndirectCommandNV(VkDrawMeshTasksIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT : DrawMeshTasksIndirectCommandNV(*reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>(&rhs)) { }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
    DrawMeshTasksIndirectCommandNV &operator=(DrawMeshTasksIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrawMeshTasksIndirectCommandNV &operator=(VkDrawMeshTasksIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>(&rhs); return *this; }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV
&setTaskCount(uint32_t taskCount_) VULKAN_HPP_NOEXCEPT { taskCount = taskCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV &setFirstTask(uint32_t firstTask_) VULKAN_HPP_NOEXCEPT { firstTask = firstTask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(taskCount, firstTask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DrawMeshTasksIndirectCommandNV const &) const = default; #else bool operator==(DrawMeshTasksIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (taskCount == rhs.taskCount) && (firstTask == rhs.firstTask); # endif } bool operator!=(DrawMeshTasksIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t taskCount = {}; uint32_t firstTask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV) == sizeof(VkDrawMeshTasksIndirectCommandNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!"); struct DrmFormatModifierProperties2EXT { using NativeType = VkDrmFormatModifierProperties2EXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT : drmFormatModifier(drmFormatModifier_), drmFormatModifierPlaneCount(drmFormatModifierPlaneCount_), drmFormatModifierTilingFeatures(drmFormatModifierTilingFeatures_) { } VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT(DrmFormatModifierProperties2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierProperties2EXT(VkDrmFormatModifierProperties2EXT const &rhs) VULKAN_HPP_NOEXCEPT : DrmFormatModifierProperties2EXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DrmFormatModifierProperties2EXT &operator=(DrmFormatModifierProperties2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierProperties2EXT &operator=(VkDrmFormatModifierProperties2EXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DrmFormatModifierProperties2EXT const &) const = default; #else bool operator==(DrmFormatModifierProperties2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if 
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (drmFormatModifier == rhs.drmFormatModifier) && (drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount) && (drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures); # endif } bool operator!=(DrmFormatModifierProperties2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint64_t drmFormatModifier = {}; uint32_t drmFormatModifierPlaneCount = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT) == sizeof(VkDrmFormatModifierProperties2EXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!"); struct DrmFormatModifierPropertiesEXT { using NativeType = VkDrmFormatModifierPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT : drmFormatModifier(drmFormatModifier_), drmFormatModifierPlaneCount(drmFormatModifierPlaneCount_), drmFormatModifierTilingFeatures(drmFormatModifierTilingFeatures_) { } VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(DrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierPropertiesEXT(VkDrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : DrmFormatModifierPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ DrmFormatModifierPropertiesEXT &operator=(DrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierPropertiesEXT &operator=(VkDrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(DrmFormatModifierPropertiesEXT const &) const = default; #else bool operator==(DrmFormatModifierPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (drmFormatModifier == rhs.drmFormatModifier) && (drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount) && (drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures); # endif } bool operator!=(DrmFormatModifierPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint64_t drmFormatModifier = {}; uint32_t drmFormatModifierPlaneCount = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT) == sizeof(VkDrmFormatModifierPropertiesEXT), "struct and 
wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!");

  struct DrmFormatModifierPropertiesList2EXT
  {
    using NativeType = VkDrmFormatModifierPropertiesList2EXT;
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT *pDrmFormatModifierProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), drmFormatModifierCount(drmFormatModifierCount_), pDrmFormatModifierProperties(pDrmFormatModifierProperties_)
    {
    }
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT(DrmFormatModifierPropertiesList2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrmFormatModifierPropertiesList2EXT(VkDrmFormatModifierPropertiesList2EXT const &rhs) VULKAN_HPP_NOEXCEPT : DrmFormatModifierPropertiesList2EXT(*reinterpret_cast<DrmFormatModifierPropertiesList2EXT const *>(&rhs)) { }
# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    DrmFormatModifierPropertiesList2EXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT> const &drmFormatModifierProperties_, void *pNext_ = nullptr)
      : pNext(pNext_)
      , drmFormatModifierCount(static_cast<uint32_t>(drmFormatModifierProperties_.size()))
      , pDrmFormatModifierProperties(drmFormatModifierProperties_.data())
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
    DrmFormatModifierPropertiesList2EXT &operator=(DrmFormatModifierPropertiesList2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrmFormatModifierPropertiesList2EXT &operator=(VkDrmFormatModifierPropertiesList2EXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const *>(&rhs); return *this; }
    explicit operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDrmFormatModifierPropertiesList2EXT *>(this); }
    explicit operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDrmFormatModifierPropertiesList2EXT *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(DrmFormatModifierPropertiesList2EXT const &) const = default;
#else
    bool operator==(DrmFormatModifierPropertiesList2EXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (drmFormatModifierCount == rhs.drmFormatModifierCount) && (pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties);
# endif
    }
    bool operator!=(DrmFormatModifierPropertiesList2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT;
    void *pNext = {};
    uint32_t drmFormatModifierCount = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT *pDrmFormatModifierProperties = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT) == sizeof(VkDrmFormatModifierPropertiesList2EXT), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesList2EXT>
  {
    using Type = DrmFormatModifierPropertiesList2EXT;
  };
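  // Illustrative usage sketch (not generated from the registry): when chained into
  // FormatProperties2, DrmFormatModifierPropertiesList2EXT follows the usual two-call enumeration
  // idiom (query the count, allocate, query again).  `physicalDevice` is a hypothetical
  // vk::PhysicalDevice, and Vulkan 1.1 or VK_KHR_get_physical_device_properties2 is assumed:
  //
  //   vk::DrmFormatModifierPropertiesList2EXT modifierList{};
  //   vk::FormatProperties2 formatProps{};
  //   formatProps.pNext = &modifierList;
  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProps );  // fills drmFormatModifierCount
  //   std::vector<vk::DrmFormatModifierProperties2EXT> modifiers( modifierList.drmFormatModifierCount );
  //   modifierList.pDrmFormatModifierProperties = modifiers.data();
  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProps );  // fills the array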
  struct DrmFormatModifierPropertiesListEXT
  {
    using NativeType = VkDrmFormatModifierPropertiesListEXT;
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT *pDrmFormatModifierProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), drmFormatModifierCount(drmFormatModifierCount_), pDrmFormatModifierProperties(pDrmFormatModifierProperties_)
    {
    }
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(DrmFormatModifierPropertiesListEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrmFormatModifierPropertiesListEXT(VkDrmFormatModifierPropertiesListEXT const &rhs) VULKAN_HPP_NOEXCEPT : DrmFormatModifierPropertiesListEXT(*reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>(&rhs)) { }
# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const &drmFormatModifierProperties_, void *pNext_ = nullptr)
      : pNext(pNext_)
      , drmFormatModifierCount(static_cast<uint32_t>(drmFormatModifierProperties_.size()))
      , pDrmFormatModifierProperties(drmFormatModifierProperties_.data())
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
    DrmFormatModifierPropertiesListEXT &operator=(DrmFormatModifierPropertiesListEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
    DrmFormatModifierPropertiesListEXT &operator=(VkDrmFormatModifierPropertiesListEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>(&rhs); return *this; }
    explicit operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>(this); }
    explicit operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(DrmFormatModifierPropertiesListEXT const &) const = default;
#else
    bool operator==(DrmFormatModifierPropertiesListEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (drmFormatModifierCount == rhs.drmFormatModifierCount) && (pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties);
# endif
    }
    bool operator!=(DrmFormatModifierPropertiesListEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
    void *pNext = {};
    uint32_t drmFormatModifierCount = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT *pDrmFormatModifierProperties = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT) == sizeof(VkDrmFormatModifierPropertiesListEXT), "struct and wrapper have
different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = DrmFormatModifierPropertiesListEXT; }; struct EventCreateInfo { using NativeType = VkEventCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR EventCreateInfo(EventCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; EventCreateInfo(VkEventCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : EventCreateInfo(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ EventCreateInfo &operator=(EventCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; EventCreateInfo &operator=(VkEventCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 EventCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 EventCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(EventCreateInfo const &) const = default; #else bool operator==(EventCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(EventCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::EventCreateInfo) == sizeof(VkEventCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "EventCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = EventCreateInfo; }; struct ExportFenceCreateInfo { using NativeType = VkExportFenceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR 
ExportFenceCreateInfo(ExportFenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportFenceCreateInfo(VkExportFenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ExportFenceCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportFenceCreateInfo &operator=(ExportFenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportFenceCreateInfo &operator=(VkExportFenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo &setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExportFenceCreateInfo const &) const = default; #else bool operator==(ExportFenceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExportFenceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo) == sizeof(VkExportFenceCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportFenceCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportFenceCreateInfo; }; using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ExportFenceWin32HandleInfoKHR { using NativeType = VkExportFenceWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES *pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pAttributes(pAttributes_), dwAccess(dwAccess_), name(name_) { } VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(ExportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportFenceWin32HandleInfoKHR(VkExportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ExportFenceWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportFenceWin32HandleInfoKHR &operator=(ExportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; 
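    // Illustrative usage sketch (not generated from the registry): on Windows, an exportable fence
    // chains ExportFenceCreateInfo and ExportFenceWin32HandleInfoKHR into FenceCreateInfo via pNext.
    // `device` is a hypothetical vk::Device; GENERIC_ALL comes from the Windows headers:
    //
    //   vk::ExportFenceWin32HandleInfoKHR win32Info( /*pAttributes=*/nullptr, /*dwAccess=*/GENERIC_ALL, /*name=*/nullptr );
    //   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32, &win32Info );
    //   vk::FenceCreateInfo fenceInfo( {}, &exportInfo );
    //   vk::Fence fence = device.createFence( fenceInfo );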
ExportFenceWin32HandleInfoKHR &operator=(VkExportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &setPAttributes(const SECURITY_ATTRIBUTES *pAttributes_) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &setDwAccess(DWORD dwAccess_) VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &setName(LPCWSTR name_) VULKAN_HPP_NOEXCEPT { name = name_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pAttributes, dwAccess, name); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExportFenceWin32HandleInfoKHR const &) const = default; # else bool operator==(ExportFenceWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pAttributes == rhs.pAttributes) && (dwAccess == rhs.dwAccess) && (name == rhs.name); # endif } bool operator!=(ExportFenceWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; const void *pNext = {}; const SECURITY_ATTRIBUTES *pAttributes = {}; DWORD dwAccess = {}; LPCWSTR name = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR) == sizeof(VkExportFenceWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportFenceWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ExportMemoryAllocateInfo { using NativeType = VkExportMemoryAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(ExportMemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryAllocateInfo(VkExportMemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ExportMemoryAllocateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportMemoryAllocateInfo &operator=(ExportMemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryAllocateInfo &operator=(VkExportMemoryAllocateInfo const &rhs) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo &setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExportMemoryAllocateInfo const &) const = default; #else bool operator==(ExportMemoryAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExportMemoryAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo) == sizeof(VkExportMemoryAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportMemoryAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportMemoryAllocateInfo; }; using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; struct ExportMemoryAllocateInfoNV { using NativeType = VkExportMemoryAllocateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(ExportMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryAllocateInfoNV(VkExportMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : ExportMemoryAllocateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportMemoryAllocateInfoNV &operator=(ExportMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryAllocateInfoNV &operator=(VkExportMemoryAllocateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; 
return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExportMemoryAllocateInfoNV const &) const = default; #else bool operator==(ExportMemoryAllocateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExportMemoryAllocateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV) == sizeof(VkExportMemoryAllocateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportMemoryAllocateInfoNV; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ExportMemoryWin32HandleInfoKHR { using NativeType = VkExportMemoryWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES *pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pAttributes(pAttributes_), dwAccess(dwAccess_), name(name_) { } VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(ExportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryWin32HandleInfoKHR(VkExportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ExportMemoryWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportMemoryWin32HandleInfoKHR &operator=(ExportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryWin32HandleInfoKHR &operator=(VkExportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &setPAttributes(const SECURITY_ATTRIBUTES *pAttributes_) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &setDwAccess(DWORD dwAccess_) VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &setName(LPCWSTR name_) VULKAN_HPP_NOEXCEPT { name = name_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit 
operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pAttributes, dwAccess, name); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExportMemoryWin32HandleInfoKHR const &) const = default; # else bool operator==(ExportMemoryWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pAttributes == rhs.pAttributes) && (dwAccess == rhs.dwAccess) && (name == rhs.name); # endif } bool operator!=(ExportMemoryWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; const void *pNext = {}; const SECURITY_ATTRIBUTES *pAttributes = {}; DWORD dwAccess = {}; LPCWSTR name = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR) == sizeof(VkExportMemoryWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportMemoryWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ExportMemoryWin32HandleInfoNV { using NativeType = VkExportMemoryWin32HandleInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES *pAttributes_ = {}, DWORD dwAccess_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pAttributes(pAttributes_), dwAccess(dwAccess_) { } VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(ExportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryWin32HandleInfoNV(VkExportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : ExportMemoryWin32HandleInfoNV(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportMemoryWin32HandleInfoNV &operator=(ExportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportMemoryWin32HandleInfoNV &operator=(VkExportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV &setPAttributes(const SECURITY_ATTRIBUTES *pAttributes_) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV &setDwAccess(DWORD dwAccess_) VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } explicit operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pAttributes, dwAccess); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExportMemoryWin32HandleInfoNV const &) const = default; # else bool operator==(ExportMemoryWin32HandleInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pAttributes == rhs.pAttributes) && (dwAccess == rhs.dwAccess); # endif } bool operator!=(ExportMemoryWin32HandleInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; const void *pNext = {}; const SECURITY_ATTRIBUTES *pAttributes = {}; DWORD dwAccess = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV) == sizeof(VkExportMemoryWin32HandleInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportMemoryWin32HandleInfoNV; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct ExportSemaphoreCreateInfo { using NativeType = VkExportSemaphoreCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(ExportSemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportSemaphoreCreateInfo(VkExportSemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ExportSemaphoreCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportSemaphoreCreateInfo &operator=(ExportSemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportSemaphoreCreateInfo &operator=(VkExportSemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(ExportSemaphoreCreateInfo const &) const = default; #else bool operator==(ExportSemaphoreCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExportSemaphoreCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo) == sizeof(VkExportSemaphoreCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExportSemaphoreCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExportSemaphoreCreateInfo; }; using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ExportSemaphoreWin32HandleInfoKHR { using NativeType = VkExportSemaphoreWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES *pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pAttributes(pAttributes_), dwAccess(dwAccess_), name(name_) { } VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(ExportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportSemaphoreWin32HandleInfoKHR(VkExportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ExportSemaphoreWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExportSemaphoreWin32HandleInfoKHR &operator=(ExportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ExportSemaphoreWin32HandleInfoKHR &operator=(VkExportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &setPAttributes(const SECURITY_ATTRIBUTES *pAttributes_) VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &setDwAccess(DWORD dwAccess_) VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &setName(LPCWSTR name_) VULKAN_HPP_NOEXCEPT { name = name_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pAttributes, 
dwAccess, name); }
# endif
# if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(ExportSemaphoreWin32HandleInfoKHR const &) const = default;
# else
    bool operator==(ExportSemaphoreWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (pAttributes == rhs.pAttributes) && (dwAccess == rhs.dwAccess) && (name == rhs.name);
# endif
    }
    bool operator!=(ExportSemaphoreWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
# endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
    const void *pNext = {};
    const SECURITY_ATTRIBUTES *pAttributes = {};
    DWORD dwAccess = {};
    LPCWSTR name = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR) == sizeof(VkExportSemaphoreWin32HandleInfoKHR), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ExportSemaphoreWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  struct ExtensionProperties
  {
    using NativeType = VkExtensionProperties;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const &extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
      : extensionName(extensionName_), specVersion(specVersion_)
    {
    }
    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(ExtensionProperties const &rhs) VULKAN_HPP_NOEXCEPT = default;
    ExtensionProperties(VkExtensionProperties const &rhs) VULKAN_HPP_NOEXCEPT : ExtensionProperties(*reinterpret_cast<ExtensionProperties const *>(&rhs)) { }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
    ExtensionProperties &operator=(ExtensionProperties const &rhs) VULKAN_HPP_NOEXCEPT = default;
    ExtensionProperties &operator=(VkExtensionProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>(&rhs); return *this; }
    explicit operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkExtensionProperties *>(this); }
    explicit operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkExtensionProperties *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(extensionName, specVersion); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(ExtensionProperties const &) const = default;
#else
    bool operator==(ExtensionProperties const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (extensionName == rhs.extensionName) && (specVersion == rhs.specVersion);
# endif
    }
    bool operator!=(ExtensionProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
    uint32_t specVersion = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExtensionProperties) == sizeof(VkExtensionProperties), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "ExtensionProperties is not
nothrow_move_constructible!"); struct ExternalMemoryProperties { using NativeType = VkExternalMemoryProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT : externalMemoryFeatures(externalMemoryFeatures_), exportFromImportedHandleTypes(exportFromImportedHandleTypes_), compatibleHandleTypes(compatibleHandleTypes_) { } VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(ExternalMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryProperties(VkExternalMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT : ExternalMemoryProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalMemoryProperties &operator=(ExternalMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryProperties &operator=(VkExternalMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalMemoryProperties const &) const = default; #else bool operator==(ExternalMemoryProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (externalMemoryFeatures == rhs.externalMemoryFeatures) && (exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes) && (compatibleHandleTypes == rhs.compatibleHandleTypes); # endif } bool operator!=(ExternalMemoryProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties) == sizeof(VkExternalMemoryProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalMemoryProperties is not nothrow_move_constructible!"); using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; struct ExternalBufferProperties { using NativeType = VkExternalBufferProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), externalMemoryProperties(externalMemoryProperties_) { } VULKAN_HPP_CONSTEXPR 
ExternalBufferProperties(ExternalBufferProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalBufferProperties(VkExternalBufferProperties const &rhs) VULKAN_HPP_NOEXCEPT : ExternalBufferProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalBufferProperties &operator=(ExternalBufferProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalBufferProperties &operator=(VkExternalBufferProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, externalMemoryProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalBufferProperties const &) const = default; #else bool operator==(ExternalBufferProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (externalMemoryProperties == rhs.externalMemoryProperties); # endif } bool operator!=(ExternalBufferProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalBufferProperties) == sizeof(VkExternalBufferProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalBufferProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalBufferProperties; }; using ExternalBufferPropertiesKHR = ExternalBufferProperties; struct ExternalFenceProperties { using NativeType = VkExternalFenceProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), exportFromImportedHandleTypes(exportFromImportedHandleTypes_), compatibleHandleTypes(compatibleHandleTypes_), externalFenceFeatures(externalFenceFeatures_) { } VULKAN_HPP_CONSTEXPR ExternalFenceProperties(ExternalFenceProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalFenceProperties(VkExternalFenceProperties const &rhs) VULKAN_HPP_NOEXCEPT : ExternalFenceProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalFenceProperties &operator=(ExternalFenceProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalFenceProperties &operator=(VkExternalFenceProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator 
VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalFenceProperties const &) const = default; #else bool operator==(ExternalFenceProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes) && (compatibleHandleTypes == rhs.compatibleHandleTypes) && (externalFenceFeatures == rhs.externalFenceFeatures); # endif } bool operator!=(ExternalFenceProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalFenceProperties) == sizeof(VkExternalFenceProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalFenceProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalFenceProperties; }; using ExternalFencePropertiesKHR = ExternalFenceProperties; #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct ExternalFormatANDROID { using NativeType = VkExternalFormatANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), externalFormat(externalFormat_) { } VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(ExternalFormatANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalFormatANDROID(VkExternalFormatANDROID const &rhs) VULKAN_HPP_NOEXCEPT : ExternalFormatANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalFormatANDROID &operator=(ExternalFormatANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalFormatANDROID &operator=(VkExternalFormatANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID &setExternalFormat(uint64_t externalFormat_) VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, externalFormat); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalFormatANDROID const &) const = default; # else bool operator==(ExternalFormatANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (externalFormat == rhs.externalFormat); # endif } bool operator!=(ExternalFormatANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; void *pNext = {}; uint64_t externalFormat = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalFormatANDROID) == sizeof(VkExternalFormatANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalFormatANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalFormatANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ExternalImageFormatProperties { using NativeType = VkExternalImageFormatProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), externalMemoryProperties(externalMemoryProperties_) { } VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(ExternalImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalImageFormatProperties(VkExternalImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT : ExternalImageFormatProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalImageFormatProperties &operator=(ExternalImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalImageFormatProperties &operator=(VkExternalImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, externalMemoryProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalImageFormatProperties const &) const = default; #else bool operator==(ExternalImageFormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (externalMemoryProperties == rhs.externalMemoryProperties); # endif } bool operator!=(ExternalImageFormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eExternalImageFormatProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties) == sizeof(VkExternalImageFormatProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalImageFormatProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalImageFormatProperties; }; using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; struct ImageFormatProperties { using NativeType = VkImageFormatProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT : maxExtent(maxExtent_), maxMipLevels(maxMipLevels_), maxArrayLayers(maxArrayLayers_), sampleCounts(sampleCounts_), maxResourceSize(maxResourceSize_) { } VULKAN_HPP_CONSTEXPR ImageFormatProperties(ImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatProperties(VkImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT : ImageFormatProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageFormatProperties &operator=(ImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatProperties &operator=(VkImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageFormatProperties const &) const = default; #else bool operator==(ImageFormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (maxExtent == rhs.maxExtent) && (maxMipLevels == rhs.maxMipLevels) && (maxArrayLayers == rhs.maxArrayLayers) && (sampleCounts == rhs.sampleCounts) && (maxResourceSize == rhs.maxResourceSize); # endif } bool operator!=(ImageFormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; uint32_t maxMipLevels = {}; uint32_t maxArrayLayers = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageFormatProperties) == sizeof(VkImageFormatProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageFormatProperties is not nothrow_move_constructible!"); struct ExternalImageFormatPropertiesNV { using NativeType = 
VkExternalImageFormatPropertiesNV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
      : imageFormatProperties(imageFormatProperties_), externalMemoryFeatures(externalMemoryFeatures_), exportFromImportedHandleTypes(exportFromImportedHandleTypes_), compatibleHandleTypes(compatibleHandleTypes_)
    {
    }

    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(ExternalImageFormatPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatPropertiesNV(VkExternalImageFormatPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT
      : ExternalImageFormatPropertiesNV(*reinterpret_cast<ExternalImageFormatPropertiesNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ExternalImageFormatPropertiesNV &operator=(ExternalImageFormatPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatPropertiesNV &operator=(VkExternalImageFormatPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>(&rhs);
      return *this;
    }

    explicit operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkExternalImageFormatPropertiesNV *>(this); }

    explicit operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkExternalImageFormatPropertiesNV *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ImageFormatProperties const &, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(ExternalImageFormatPropertiesNV const &) const = default;
#else
    bool operator==(ExternalImageFormatPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (imageFormatProperties == rhs.imageFormatProperties) && (externalMemoryFeatures == rhs.externalMemoryFeatures) && (exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes) && (compatibleHandleTypes == rhs.compatibleHandleTypes);
# endif
    }

    bool operator!=(ExternalImageFormatPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageFormatProperties           imageFormatProperties         = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV    externalMemoryFeatures        = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes         = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV) == sizeof(VkExternalImageFormatPropertiesNV), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!");

  struct ExternalMemoryBufferCreateInfo
  {
    using NativeType = VkExternalMemoryBufferCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
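    // Usage sketch (illustrative comment only, assuming BufferCreateInfo and StructureChain as
    // defined elsewhere in these bindings): ExternalMemoryBufferCreateInfo is normally chained
    // into the pNext of a BufferCreateInfo to request an exportable allocation. The handle-type
    // bit chosen below is a placeholder example.
    //
    //   VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo externalInfo{};
    //   externalInfo.setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
    //
    //   VULKAN_HPP_NAMESPACE::BufferCreateInfo bufferInfo{};
    //   bufferInfo.setPNext( &externalInfo );   // or assemble the chain with StructureChain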
VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(ExternalMemoryBufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryBufferCreateInfo(VkExternalMemoryBufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ExternalMemoryBufferCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalMemoryBufferCreateInfo &operator=(ExternalMemoryBufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryBufferCreateInfo &operator=(VkExternalMemoryBufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalMemoryBufferCreateInfo const &) const = default; #else bool operator==(ExternalMemoryBufferCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExternalMemoryBufferCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo) == sizeof(VkExternalMemoryBufferCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalMemoryBufferCreateInfo; }; using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; struct ExternalMemoryImageCreateInfo { using NativeType = VkExternalMemoryImageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(ExternalMemoryImageCreateInfo 
const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryImageCreateInfo(VkExternalMemoryImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ExternalMemoryImageCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalMemoryImageCreateInfo &operator=(ExternalMemoryImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryImageCreateInfo &operator=(VkExternalMemoryImageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalMemoryImageCreateInfo const &) const = default; #else bool operator==(ExternalMemoryImageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExternalMemoryImageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo) == sizeof(VkExternalMemoryImageCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalMemoryImageCreateInfo; }; using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; struct ExternalMemoryImageCreateInfoNV { using NativeType = VkExternalMemoryImageCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleTypes(handleTypes_) { } VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(ExternalMemoryImageCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryImageCreateInfoNV(VkExternalMemoryImageCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : ExternalMemoryImageCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalMemoryImageCreateInfoNV 
&operator=(ExternalMemoryImageCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryImageCreateInfoNV &operator=(VkExternalMemoryImageCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setHandleTypes(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_) VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalMemoryImageCreateInfoNV const &) const = default; #else bool operator==(ExternalMemoryImageCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleTypes == rhs.handleTypes); # endif } bool operator!=(ExternalMemoryImageCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV) == sizeof(VkExternalMemoryImageCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalMemoryImageCreateInfoNV; }; struct ExternalSemaphoreProperties { using NativeType = VkExternalSemaphoreProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), exportFromImportedHandleTypes(exportFromImportedHandleTypes_), compatibleHandleTypes(compatibleHandleTypes_), externalSemaphoreFeatures(externalSemaphoreFeatures_) { } VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(ExternalSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; ExternalSemaphoreProperties(VkExternalSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT : ExternalSemaphoreProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ExternalSemaphoreProperties &operator=(ExternalSemaphoreProperties const &rhs) 
VULKAN_HPP_NOEXCEPT = default; ExternalSemaphoreProperties &operator=(VkExternalSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ExternalSemaphoreProperties const &) const = default; #else bool operator==(ExternalSemaphoreProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes) && (compatibleHandleTypes == rhs.compatibleHandleTypes) && (externalSemaphoreFeatures == rhs.externalSemaphoreFeatures); # endif } bool operator!=(ExternalSemaphoreProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties) == sizeof(VkExternalSemaphoreProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ExternalSemaphoreProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = ExternalSemaphoreProperties; }; using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; struct FenceCreateInfo { using NativeType = VkFenceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR FenceCreateInfo(FenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FenceCreateInfo(VkFenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : FenceCreateInfo(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FenceCreateInfo &operator=(FenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FenceCreateInfo &operator=(VkFenceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkFenceCreateInfo const 
&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FenceCreateInfo const &) const = default; #else bool operator==(FenceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(FenceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FenceCreateInfo) == sizeof(VkFenceCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FenceCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = FenceCreateInfo; }; struct FenceGetFdInfoKHR { using NativeType = VkFenceGetFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fence(fence_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(FenceGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; FenceGetFdInfoKHR(VkFenceGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FenceGetFdInfoKHR &operator=(FenceGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; FenceGetFdInfoKHR &operator=(VkFenceGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR &setFence(VULKAN_HPP_NAMESPACE::Fence fence_) VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR &setHandleType(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fence, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FenceGetFdInfoKHR const &) const = default; #else bool 
operator==(FenceGetFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fence == rhs.fence) && (handleType == rhs.handleType); # endif } bool operator!=(FenceGetFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Fence fence = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR) == sizeof(VkFenceGetFdInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FenceGetFdInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = FenceGetFdInfoKHR; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct FenceGetWin32HandleInfoKHR { using NativeType = VkFenceGetWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fence(fence_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(FenceGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; FenceGetWin32HandleInfoKHR(VkFenceGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : FenceGetWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FenceGetWin32HandleInfoKHR &operator=(FenceGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; FenceGetWin32HandleInfoKHR &operator=(VkFenceGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR &setFence(VULKAN_HPP_NAMESPACE::Fence fence_) VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR &setHandleType(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fence, handleType); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FenceGetWin32HandleInfoKHR const &) const = default; # else bool operator==(FenceGetWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if 
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fence == rhs.fence) && (handleType == rhs.handleType); # endif } bool operator!=(FenceGetWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Fence fence = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR) == sizeof(VkFenceGetWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = FenceGetWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct FilterCubicImageViewImageFormatPropertiesEXT { using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), filterCubic(filterCubic_), filterCubicMinmax(filterCubicMinmax_) { } VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(FilterCubicImageViewImageFormatPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; FilterCubicImageViewImageFormatPropertiesEXT(VkFilterCubicImageViewImageFormatPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : FilterCubicImageViewImageFormatPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FilterCubicImageViewImageFormatPropertiesEXT &operator=(FilterCubicImageViewImageFormatPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; FilterCubicImageViewImageFormatPropertiesEXT &operator=(VkFilterCubicImageViewImageFormatPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, filterCubic, filterCubicMinmax); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FilterCubicImageViewImageFormatPropertiesEXT const &) const = default; #else bool operator==(FilterCubicImageViewImageFormatPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (filterCubic == rhs.filterCubic) && (filterCubicMinmax == rhs.filterCubicMinmax); # endif } bool operator!=(FilterCubicImageViewImageFormatPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT) == sizeof(VkFilterCubicImageViewImageFormatPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = FilterCubicImageViewImageFormatPropertiesEXT; }; struct FormatProperties { using NativeType = VkFormatProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT : linearTilingFeatures(linearTilingFeatures_), optimalTilingFeatures(optimalTilingFeatures_), bufferFeatures(bufferFeatures_) { } VULKAN_HPP_CONSTEXPR FormatProperties(FormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; FormatProperties(VkFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT : FormatProperties(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FormatProperties &operator=(FormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; FormatProperties &operator=(VkFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(linearTilingFeatures, optimalTilingFeatures, bufferFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FormatProperties const &) const = default; #else bool operator==(FormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (linearTilingFeatures == rhs.linearTilingFeatures) && (optimalTilingFeatures == rhs.optimalTilingFeatures) && (bufferFeatures == rhs.bufferFeatures); # endif } bool operator!=(FormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FormatProperties) == sizeof(VkFormatProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FormatProperties is not nothrow_move_constructible!"); struct FormatProperties2 { using NativeType = VkFormatProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eFormatProperties2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), formatProperties(formatProperties_) { } VULKAN_HPP_CONSTEXPR FormatProperties2(FormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; FormatProperties2(VkFormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT : FormatProperties2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FormatProperties2 &operator=(FormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; FormatProperties2 &operator=(VkFormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, formatProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FormatProperties2 const &) const = default; #else bool operator==(FormatProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (formatProperties == rhs.formatProperties); # endif } bool operator!=(FormatProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; void *pNext = {}; VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FormatProperties2) == sizeof(VkFormatProperties2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FormatProperties2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = FormatProperties2; }; using FormatProperties2KHR = FormatProperties2; struct FormatProperties3 { using NativeType = VkFormatProperties3; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FormatProperties3(VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), linearTilingFeatures(linearTilingFeatures_), optimalTilingFeatures(optimalTilingFeatures_), bufferFeatures(bufferFeatures_) { } VULKAN_HPP_CONSTEXPR FormatProperties3(FormatProperties3 const &rhs) VULKAN_HPP_NOEXCEPT = default; FormatProperties3(VkFormatProperties3 const &rhs) VULKAN_HPP_NOEXCEPT : FormatProperties3(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FormatProperties3 &operator=(FormatProperties3 const &rhs) VULKAN_HPP_NOEXCEPT = default; FormatProperties3 &operator=(VkFormatProperties3 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkFormatProperties3 const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FormatProperties3 const &) const = default; #else bool operator==(FormatProperties3 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (linearTilingFeatures == rhs.linearTilingFeatures) && (optimalTilingFeatures == rhs.optimalTilingFeatures) && (bufferFeatures == rhs.bufferFeatures); # endif } bool operator!=(FormatProperties3 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3; void *pNext = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FormatProperties3) == sizeof(VkFormatProperties3), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FormatProperties3 is not nothrow_move_constructible!"); template<> struct CppType { using Type = FormatProperties3; }; using FormatProperties3KHR = FormatProperties3; struct FragmentShadingRateAttachmentInfoKHR { using NativeType = VkFragmentShadingRateAttachmentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pFragmentShadingRateAttachment(pFragmentShadingRateAttachment_), shadingRateAttachmentTexelSize(shadingRateAttachmentTexelSize_) { } VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(FragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; FragmentShadingRateAttachmentInfoKHR(VkFragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : FragmentShadingRateAttachmentInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FragmentShadingRateAttachmentInfoKHR &operator=(FragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; FragmentShadingRateAttachmentInfoKHR &operator=(VkFragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pFragmentShadingRateAttachment_) 
VULKAN_HPP_NOEXCEPT
    {
      pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
      setShadingRateAttachmentTexelSize(VULKAN_HPP_NAMESPACE::Extent2D const &shadingRateAttachmentTexelSize_) VULKAN_HPP_NOEXCEPT
    {
      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>(this); }

    explicit operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(FragmentShadingRateAttachmentInfoKHR const &) const = default;
#else
    bool operator==(FragmentShadingRateAttachmentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment) && (shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize);
# endif
    }

    bool operator!=(FragmentShadingRateAttachmentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType                          = StructureType::eFragmentShadingRateAttachmentInfoKHR;
    const void                                       *pNext                          = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pFragmentShadingRateAttachment = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    shadingRateAttachmentTexelSize = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR) == sizeof(VkFragmentShadingRateAttachmentInfoKHR), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
  {
    using Type = FragmentShadingRateAttachmentInfoKHR;
  };

  struct FramebufferAttachmentImageInfo
  {
    using NativeType = VkFramebufferAttachmentImageInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format *pViewFormats_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), flags(flags_), usage(usage_), width(width_), height(height_), layerCount(layerCount_), viewFormatCount(viewFormatCount_), pViewFormats(pViewFormats_)
    {
    }

    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(FramebufferAttachmentImageInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    FramebufferAttachmentImageInfo(VkFramebufferAttachmentImageInfo const &rhs) VULKAN_HPP_NOEXCEPT
      : FramebufferAttachmentImageInfo(*reinterpret_cast<FramebufferAttachmentImageInfo const *>(&rhs))
    {
    }

# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
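    // Usage sketch (illustrative comment only): in enhanced mode the constructor below accepts
    // any contiguous, non-temporary range of Formats via ArrayProxyNoTemporaries and derives
    // viewFormatCount / pViewFormats from it. The formats, usage bit, and extent values shown
    // are placeholder examples.
    //
    //   std::array<VULKAN_HPP_NAMESPACE::Format, 2> viewFormats = {
    //     VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm, VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb };
    //   VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo attachmentInfo(
    //     {}, VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eColorAttachment, 1920, 1080, 1, viewFormats );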
FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewFormats_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , usage(usage_) , width(width_) , height(height_) , layerCount(layerCount_) , viewFormatCount(static_cast(viewFormats_.size())) , pViewFormats(viewFormats_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FramebufferAttachmentImageInfo &operator=(FramebufferAttachmentImageInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferAttachmentImageInfo &operator=(VkFramebufferAttachmentImageInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setFlags(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setWidth(uint32_t width_) VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setHeight(uint32_t height_) VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setLayerCount(uint32_t layerCount_) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setViewFormatCount(uint32_t viewFormatCount_) VULKAN_HPP_NOEXCEPT { viewFormatCount = viewFormatCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &setPViewFormats(const VULKAN_HPP_NAMESPACE::Format *pViewFormats_) VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) FramebufferAttachmentImageInfo & setViewFormats(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewFormats_) VULKAN_HPP_NOEXCEPT { viewFormatCount = static_cast(viewFormats_.size()); pViewFormats = viewFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FramebufferAttachmentImageInfo const &) const = default; #else bool operator==(FramebufferAttachmentImageInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (usage == rhs.usage) && (width == rhs.width) && (height == rhs.height) && (layerCount == rhs.layerCount) && (viewFormatCount == rhs.viewFormatCount) && 
(pViewFormats == rhs.pViewFormats); # endif } bool operator!=(FramebufferAttachmentImageInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; uint32_t width = {}; uint32_t height = {}; uint32_t layerCount = {}; uint32_t viewFormatCount = {}; const VULKAN_HPP_NAMESPACE::Format *pViewFormats = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo) == sizeof(VkFramebufferAttachmentImageInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FramebufferAttachmentImageInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = FramebufferAttachmentImageInfo; }; using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; struct FramebufferAttachmentsCreateInfo { using NativeType = VkFramebufferAttachmentsCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo *pAttachmentImageInfos_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), attachmentImageInfoCount(attachmentImageInfoCount_), pAttachmentImageInfos(pAttachmentImageInfos_) { } VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(FramebufferAttachmentsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferAttachmentsCreateInfo(VkFramebufferAttachmentsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : FramebufferAttachmentsCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachmentImageInfos_, const void *pNext_ = nullptr) : pNext(pNext_) , attachmentImageInfoCount(static_cast(attachmentImageInfos_.size())) , pAttachmentImageInfos(attachmentImageInfos_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FramebufferAttachmentsCreateInfo &operator=(FramebufferAttachmentsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferAttachmentsCreateInfo &operator=(VkFramebufferAttachmentsCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &setAttachmentImageInfoCount(uint32_t attachmentImageInfoCount_) VULKAN_HPP_NOEXCEPT { attachmentImageInfoCount = attachmentImageInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos(const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo *pAttachmentImageInfos_) VULKAN_HPP_NOEXCEPT { pAttachmentImageInfos = pAttachmentImageInfos_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) FramebufferAttachmentsCreateInfo &setAttachmentImageInfos( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachmentImageInfos_) VULKAN_HPP_NOEXCEPT { attachmentImageInfoCount = static_cast(attachmentImageInfos_.size()); pAttachmentImageInfos = attachmentImageInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FramebufferAttachmentsCreateInfo const &) const = default; #else bool operator==(FramebufferAttachmentsCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (attachmentImageInfoCount == rhs.attachmentImageInfoCount) && (pAttachmentImageInfos == rhs.pAttachmentImageInfos); # endif } bool operator!=(FramebufferAttachmentsCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; const void *pNext = {}; uint32_t attachmentImageInfoCount = {}; const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo *pAttachmentImageInfos = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo) == sizeof(VkFramebufferAttachmentsCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = FramebufferAttachmentsCreateInfo; }; using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; struct FramebufferCreateInfo { using NativeType = VkFramebufferCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView *pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), renderPass(renderPass_), attachmentCount(attachmentCount_), pAttachments(pAttachments_), width(width_), height(height_), layers(layers_) { } VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(FramebufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferCreateInfo(VkFramebufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : FramebufferCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_, uint32_t width_ = {}, uint32_t height_ = {}, 
uint32_t layers_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , renderPass(renderPass_) , attachmentCount(static_cast(attachments_.size())) , pAttachments(attachments_.data()) , width(width_) , height(height_) , layers(layers_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FramebufferCreateInfo &operator=(FramebufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferCreateInfo &operator=(VkFramebufferCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass_) VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setAttachmentCount(uint32_t attachmentCount_) VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setPAttachments(const VULKAN_HPP_NAMESPACE::ImageView *pAttachments_) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) FramebufferCreateInfo & setAttachments(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast(attachments_.size()); pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setWidth(uint32_t width_) VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setHeight(uint32_t height_) VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &setLayers(uint32_t layers_) VULKAN_HPP_NOEXCEPT { layers = layers_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FramebufferCreateInfo const &) const = default; #else bool operator==(FramebufferCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (renderPass == rhs.renderPass) && (attachmentCount == rhs.attachmentCount) && (pAttachments == rhs.pAttachments) && (width == rhs.width) && (height == rhs.height) && (layers == rhs.layers); # endif } bool operator!=(FramebufferCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::RenderPass renderPass = 
{}; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::ImageView *pAttachments = {}; uint32_t width = {}; uint32_t height = {}; uint32_t layers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FramebufferCreateInfo) == sizeof(VkFramebufferCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FramebufferCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = FramebufferCreateInfo; }; struct FramebufferMixedSamplesCombinationNV { using NativeType = VkFramebufferMixedSamplesCombinationNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), coverageReductionMode(coverageReductionMode_), rasterizationSamples(rasterizationSamples_), depthStencilSamples(depthStencilSamples_), colorSamples(colorSamples_) { } VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(FramebufferMixedSamplesCombinationNV const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferMixedSamplesCombinationNV(VkFramebufferMixedSamplesCombinationNV const &rhs) VULKAN_HPP_NOEXCEPT : FramebufferMixedSamplesCombinationNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ FramebufferMixedSamplesCombinationNV &operator=(FramebufferMixedSamplesCombinationNV const &rhs) VULKAN_HPP_NOEXCEPT = default; FramebufferMixedSamplesCombinationNV &operator=(VkFramebufferMixedSamplesCombinationNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(FramebufferMixedSamplesCombinationNV const &) const = default; #else bool operator==(FramebufferMixedSamplesCombinationNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (coverageReductionMode == rhs.coverageReductionMode) && (rasterizationSamples == rhs.rasterizationSamples) && (depthStencilSamples == rhs.depthStencilSamples) && (colorSamples == rhs.colorSamples); # endif } bool operator!=(FramebufferMixedSamplesCombinationNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; 
void *pNext = {}; VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV) == sizeof(VkFramebufferMixedSamplesCombinationNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = FramebufferMixedSamplesCombinationNV; }; struct IndirectCommandsStreamNV { using NativeType = VkIndirectCommandsStreamNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT : buffer(buffer_), offset(offset_) { } VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(IndirectCommandsStreamNV const &rhs) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsStreamNV(VkIndirectCommandsStreamNV const &rhs) VULKAN_HPP_NOEXCEPT : IndirectCommandsStreamNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ IndirectCommandsStreamNV &operator=(IndirectCommandsStreamNV const &rhs) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsStreamNV &operator=(VkIndirectCommandsStreamNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(buffer, offset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(IndirectCommandsStreamNV const &) const = default; #else bool operator==(IndirectCommandsStreamNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (buffer == rhs.buffer) && (offset == rhs.offset); # endif } bool operator!=(IndirectCommandsStreamNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Buffer buffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV) == sizeof(VkIndirectCommandsStreamNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "IndirectCommandsStreamNV is not nothrow_move_constructible!"); struct 
GeneratedCommandsInfoNV { using NativeType = VkGeneratedCommandsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV *pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pipelineBindPoint(pipelineBindPoint_), pipeline(pipeline_), indirectCommandsLayout(indirectCommandsLayout_), streamCount(streamCount_), pStreams(pStreams_), sequencesCount(sequencesCount_), preprocessBuffer(preprocessBuffer_), preprocessOffset(preprocessOffset_), preprocessSize(preprocessSize_), sequencesCountBuffer(sequencesCountBuffer_), sequencesCountOffset(sequencesCountOffset_), sequencesIndexBuffer(sequencesIndexBuffer_), sequencesIndexOffset(sequencesIndexOffset_) { } VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(GeneratedCommandsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsInfoNV(VkGeneratedCommandsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : GeneratedCommandsInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , pipelineBindPoint(pipelineBindPoint_) , pipeline(pipeline_) , indirectCommandsLayout(indirectCommandsLayout_) , streamCount(static_cast(streams_.size())) , pStreams(streams_.data()) , sequencesCount(sequencesCount_) , preprocessBuffer(preprocessBuffer_) , preprocessOffset(preprocessOffset_) , preprocessSize(preprocessSize_) , sequencesCountBuffer(sequencesCountBuffer_) , sequencesCountOffset(sequencesCountOffset_) , sequencesIndexBuffer(sequencesIndexBuffer_) , sequencesIndexOffset(sequencesIndexOffset_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeneratedCommandsInfoNV &operator=(GeneratedCommandsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsInfoNV &operator=(VkGeneratedCommandsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPipelineBindPoint(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline_) VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_) VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setStreamCount(uint32_t streamCount_) VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPStreams(const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV *pStreams_) VULKAN_HPP_NOEXCEPT { pStreams = pStreams_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GeneratedCommandsInfoNV & setStreams(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &streams_) VULKAN_HPP_NOEXCEPT { streamCount = static_cast(streams_.size()); pStreams = streams_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setSequencesCount(uint32_t sequencesCount_) VULKAN_HPP_NOEXCEPT { sequencesCount = sequencesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPreprocessBuffer(VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_) VULKAN_HPP_NOEXCEPT { preprocessBuffer = preprocessBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPreprocessOffset(VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_) VULKAN_HPP_NOEXCEPT { preprocessOffset = preprocessOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setPreprocessSize(VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_) VULKAN_HPP_NOEXCEPT { preprocessSize = preprocessSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setSequencesCountBuffer(VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_) VULKAN_HPP_NOEXCEPT { sequencesCountBuffer = sequencesCountBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setSequencesCountOffset(VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_) VULKAN_HPP_NOEXCEPT { sequencesCountOffset = sequencesCountOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setSequencesIndexBuffer(VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_) VULKAN_HPP_NOEXCEPT { sequencesIndexBuffer = sequencesIndexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &setSequencesIndexOffset(VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_) VULKAN_HPP_NOEXCEPT { sequencesIndexOffset = sequencesIndexOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, streamCount, pStreams, sequencesCount, preprocessBuffer, 
preprocessOffset, preprocessSize, sequencesCountBuffer, sequencesCountOffset, sequencesIndexBuffer, sequencesIndexOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GeneratedCommandsInfoNV const &) const = default; #else bool operator==(GeneratedCommandsInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipelineBindPoint == rhs.pipelineBindPoint) && (pipeline == rhs.pipeline) && (indirectCommandsLayout == rhs.indirectCommandsLayout) && (streamCount == rhs.streamCount) && (pStreams == rhs.pStreams) && (sequencesCount == rhs.sequencesCount) && (preprocessBuffer == rhs.preprocessBuffer) && (preprocessOffset == rhs.preprocessOffset) && (preprocessSize == rhs.preprocessSize) && (sequencesCountBuffer == rhs.sequencesCountBuffer) && (sequencesCountOffset == rhs.sequencesCountOffset) && (sequencesIndexBuffer == rhs.sequencesIndexBuffer) && (sequencesIndexOffset == rhs.sequencesIndexOffset); # endif } bool operator!=(GeneratedCommandsInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; uint32_t streamCount = {}; const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV *pStreams = {}; uint32_t sequencesCount = {}; VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV) == sizeof(VkGeneratedCommandsInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GeneratedCommandsInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = GeneratedCommandsInfoNV; }; struct GeneratedCommandsMemoryRequirementsInfoNV { using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pipelineBindPoint(pipelineBindPoint_), pipeline(pipeline_), indirectCommandsLayout(indirectCommandsLayout_), maxSequencesCount(maxSequencesCount_) { } VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(GeneratedCommandsMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = 
default; GeneratedCommandsMemoryRequirementsInfoNV(VkGeneratedCommandsMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : GeneratedCommandsMemoryRequirementsInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeneratedCommandsMemoryRequirementsInfoNV &operator=(GeneratedCommandsMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsMemoryRequirementsInfoNV &operator=(VkGeneratedCommandsMemoryRequirementsInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &setPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline_) VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_) VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &setMaxSequencesCount(uint32_t maxSequencesCount_) VULKAN_HPP_NOEXCEPT { maxSequencesCount = maxSequencesCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GeneratedCommandsMemoryRequirementsInfoNV const &) const = default; #else bool operator==(GeneratedCommandsMemoryRequirementsInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipelineBindPoint == rhs.pipelineBindPoint) && (pipeline == rhs.pipeline) && (indirectCommandsLayout == rhs.indirectCommandsLayout) && (maxSequencesCount == rhs.maxSequencesCount); # endif } bool operator!=(GeneratedCommandsMemoryRequirementsInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; uint32_t maxSequencesCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV) == sizeof(VkGeneratedCommandsMemoryRequirementsInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard 
layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = GeneratedCommandsMemoryRequirementsInfoNV; }; struct VertexInputBindingDescription { using NativeType = VkVertexInputBindingDescription; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT : binding(binding_), stride(stride_), inputRate(inputRate_) { } VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(VertexInputBindingDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDescription(VkVertexInputBindingDescription const &rhs) VULKAN_HPP_NOEXCEPT : VertexInputBindingDescription(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VertexInputBindingDescription &operator=(VertexInputBindingDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDescription &operator=(VkVertexInputBindingDescription const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription &setStride(uint32_t stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription &setInputRate(VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_) VULKAN_HPP_NOEXCEPT { inputRate = inputRate_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(binding, stride, inputRate); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VertexInputBindingDescription const &) const = default; #else bool operator==(VertexInputBindingDescription const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (binding == rhs.binding) && (stride == rhs.stride) && (inputRate == rhs.inputRate); # endif } bool operator!=(VertexInputBindingDescription const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t binding = {}; uint32_t stride = {}; VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VertexInputBindingDescription) == sizeof(VkVertexInputBindingDescription), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VertexInputBindingDescription is not nothrow_move_constructible!"); struct VertexInputAttributeDescription { using NativeType = VkVertexInputAttributeDescription; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ 
= {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT : location(location_), binding(binding_), format(format_), offset(offset_) { } VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(VertexInputAttributeDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputAttributeDescription(VkVertexInputAttributeDescription const &rhs) VULKAN_HPP_NOEXCEPT : VertexInputAttributeDescription(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VertexInputAttributeDescription &operator=(VertexInputAttributeDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputAttributeDescription &operator=(VkVertexInputAttributeDescription const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &setLocation(uint32_t location_) VULKAN_HPP_NOEXCEPT { location = location_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &setOffset(uint32_t offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(location, binding, format, offset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VertexInputAttributeDescription const &) const = default; #else bool operator==(VertexInputAttributeDescription const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (location == rhs.location) && (binding == rhs.binding) && (format == rhs.format) && (offset == rhs.offset); # endif } bool operator!=(VertexInputAttributeDescription const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t location = {}; uint32_t binding = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; uint32_t offset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription) == sizeof(VkVertexInputAttributeDescription), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VertexInputAttributeDescription is not nothrow_move_constructible!"); struct PipelineVertexInputStateCreateInfo { using NativeType = VkPipelineVertexInputStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const 
VULKAN_HPP_NAMESPACE::VertexInputBindingDescription *pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription *pVertexAttributeDescriptions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), vertexBindingDescriptionCount(vertexBindingDescriptionCount_), pVertexBindingDescriptions(pVertexBindingDescriptions_), vertexAttributeDescriptionCount(vertexAttributeDescriptionCount_), pVertexAttributeDescriptions(pVertexAttributeDescriptions_) { } VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(PipelineVertexInputStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputStateCreateInfo(VkPipelineVertexInputStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineVertexInputStateCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vertexAttributeDescriptions_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , vertexBindingDescriptionCount(static_cast(vertexBindingDescriptions_.size())) , pVertexBindingDescriptions(vertexBindingDescriptions_.data()) , vertexAttributeDescriptionCount(static_cast(vertexAttributeDescriptions_.size())) , pVertexAttributeDescriptions(vertexAttributeDescriptions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineVertexInputStateCreateInfo &operator=(PipelineVertexInputStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputStateCreateInfo &operator=(VkPipelineVertexInputStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &setVertexBindingDescriptionCount(uint32_t vertexBindingDescriptionCount_) VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = vertexBindingDescriptionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions(const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription *pVertexBindingDescriptions_) VULKAN_HPP_NOEXCEPT { pVertexBindingDescriptions = pVertexBindingDescriptions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineVertexInputStateCreateInfo &setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vertexBindingDescriptions_) VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = static_cast(vertexBindingDescriptions_.size()); pVertexBindingDescriptions = vertexBindingDescriptions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount(uint32_t vertexAttributeDescriptionCount_) VULKAN_HPP_NOEXCEPT { vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & 
setPVertexAttributeDescriptions(const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription *pVertexAttributeDescriptions_) VULKAN_HPP_NOEXCEPT { pVertexAttributeDescriptions = pVertexAttributeDescriptions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineVertexInputStateCreateInfo &setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vertexAttributeDescriptions_) VULKAN_HPP_NOEXCEPT { vertexAttributeDescriptionCount = static_cast(vertexAttributeDescriptions_.size()); pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineVertexInputStateCreateInfo const &) const = default; #else bool operator==(PipelineVertexInputStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount) && (pVertexBindingDescriptions == rhs.pVertexBindingDescriptions) && (vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount) && (pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions); # endif } bool operator!=(PipelineVertexInputStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; uint32_t vertexBindingDescriptionCount = {}; const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription *pVertexBindingDescriptions = {}; uint32_t vertexAttributeDescriptionCount = {}; const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription *pVertexAttributeDescriptions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo) == sizeof(VkPipelineVertexInputStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineVertexInputStateCreateInfo; }; struct PipelineInputAssemblyStateCreateInfo { using NativeType = VkPipelineInputAssemblyStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = 
VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), topology(topology_), primitiveRestartEnable(primitiveRestartEnable_) { } VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(PipelineInputAssemblyStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineInputAssemblyStateCreateInfo(VkPipelineInputAssemblyStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineInputAssemblyStateCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineInputAssemblyStateCreateInfo &operator=(PipelineInputAssemblyStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineInputAssemblyStateCreateInfo &operator=(VkPipelineInputAssemblyStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &setTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_) VULKAN_HPP_NOEXCEPT { topology = topology_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_) VULKAN_HPP_NOEXCEPT { primitiveRestartEnable = primitiveRestartEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, topology, primitiveRestartEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineInputAssemblyStateCreateInfo const &) const = default; #else bool operator==(PipelineInputAssemblyStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (topology == rhs.topology) && (primitiveRestartEnable == rhs.primitiveRestartEnable); # endif } bool operator!=(PipelineInputAssemblyStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo) == sizeof(VkPipelineInputAssemblyStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
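// Usage sketch (editorial addition, not generated from the registry; kept inside '#if 0' so it
// never affects compilation). Assuming enhanced mode is enabled (VULKAN_HPP_DISABLE_ENHANCED_MODE
// not defined) and the struct setters are available (VULKAN_HPP_NO_STRUCT_SETTERS not defined),
// this hypothetical helper shows how the vertex-input and input-assembly state defined above is
// typically filled. Types are unqualified because the sketch sits inside VULKAN_HPP_NAMESPACE;
// application code would normally spell them vk::PipelineVertexInputStateCreateInfo etc.
#if 0
  inline void exampleVertexInputState()
  {
    // One binding of 24 bytes per vertex: a vec3 position followed by a vec3 normal.
    VertexInputBindingDescription bindings[] = {
      VertexInputBindingDescription(0, 24, VertexInputRate::eVertex)
    };
    VertexInputAttributeDescription attributes[] = {
      VertexInputAttributeDescription(0, 0, Format::eR32G32B32Sfloat, 0),   // location 0: position
      VertexInputAttributeDescription(1, 0, Format::eR32G32B32Sfloat, 12)   // location 1: normal
    };

    // The ArrayProxyNoTemporaries constructor derives the count/pointer pairs from the arrays.
    PipelineVertexInputStateCreateInfo vertexInput({}, bindings, attributes);

    // Triangle lists with primitive restart disabled; the fluent setters return *this and chain.
    PipelineInputAssemblyStateCreateInfo inputAssembly =
      PipelineInputAssemblyStateCreateInfo().setTopology(PrimitiveTopology::eTriangleList).setPrimitiveRestartEnable(VK_FALSE);

    (void)vertexInput;
    (void)inputAssembly;
  }
#endif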
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineInputAssemblyStateCreateInfo; }; struct PipelineTessellationStateCreateInfo { using NativeType = VkPipelineTessellationStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), patchControlPoints(patchControlPoints_) { } VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(PipelineTessellationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineTessellationStateCreateInfo(VkPipelineTessellationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineTessellationStateCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineTessellationStateCreateInfo &operator=(PipelineTessellationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineTessellationStateCreateInfo &operator=(VkPipelineTessellationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo &setPatchControlPoints(uint32_t patchControlPoints_) VULKAN_HPP_NOEXCEPT { patchControlPoints = patchControlPoints_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, patchControlPoints); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineTessellationStateCreateInfo const &) const = default; #else bool operator==(PipelineTessellationStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (patchControlPoints == rhs.patchControlPoints); # endif } bool operator!=(PipelineTessellationStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; uint32_t patchControlPoints = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo) == sizeof(VkPipelineTessellationStateCreateInfo), "struct and wrapper have different size!"); 
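// Editorial note (not generated from the registry): PipelineTessellationStateCreateInfo is only
// consulted when the pipeline contains tessellation shader stages, and patchControlPoints gives
// the number of control points per patch (such pipelines use PrimitiveTopology::ePatchList).
// A minimal sketch with a hypothetical helper, kept disabled so it does not affect compilation:
#if 0
  inline PipelineTessellationStateCreateInfo exampleTessellationState()
  {
    // Three control points per patch, e.g. triangle patches; flags and pNext keep their defaults.
    return PipelineTessellationStateCreateInfo().setPatchControlPoints(3);
  }
#endif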
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineTessellationStateCreateInfo; }; struct PipelineViewportStateCreateInfo { using NativeType = VkPipelineViewportStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport *pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), viewportCount(viewportCount_), pViewports(pViewports_), scissorCount(scissorCount_), pScissors(pScissors_) { } VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(PipelineViewportStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportStateCreateInfo(VkPipelineViewportStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportStateCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &scissors_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , viewportCount(static_cast(viewports_.size())) , pViewports(viewports_.data()) , scissorCount(static_cast(scissors_.size())) , pScissors(scissors_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportStateCreateInfo &operator=(PipelineViewportStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportStateCreateInfo &operator=(VkPipelineViewportStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &setViewportCount(uint32_t viewportCount_) VULKAN_HPP_NOEXCEPT { viewportCount = viewportCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &setPViewports(const VULKAN_HPP_NAMESPACE::Viewport *pViewports_) VULKAN_HPP_NOEXCEPT { pViewports = pViewports_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportStateCreateInfo & setViewports(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewports_) VULKAN_HPP_NOEXCEPT { viewportCount = static_cast(viewports_.size()); pViewports = viewports_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &setScissorCount(uint32_t scissorCount_) VULKAN_HPP_NOEXCEPT { scissorCount = scissorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &setPScissors(const 
VULKAN_HPP_NAMESPACE::Rect2D *pScissors_) VULKAN_HPP_NOEXCEPT { pScissors = pScissors_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportStateCreateInfo & setScissors(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &scissors_) VULKAN_HPP_NOEXCEPT { scissorCount = static_cast(scissors_.size()); pScissors = scissors_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportStateCreateInfo const &) const = default; #else bool operator==(PipelineViewportStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (viewportCount == rhs.viewportCount) && (pViewports == rhs.pViewports) && (scissorCount == rhs.scissorCount) && (pScissors == rhs.pScissors); # endif } bool operator!=(PipelineViewportStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::Viewport *pViewports = {}; uint32_t scissorCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D *pScissors = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo) == sizeof(VkPipelineViewportStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportStateCreateInfo; }; struct PipelineRasterizationStateCreateInfo { using NativeType = VkPipelineRasterizationStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), depthClampEnable(depthClampEnable_), 
rasterizerDiscardEnable(rasterizerDiscardEnable_), polygonMode(polygonMode_), cullMode(cullMode_), frontFace(frontFace_), depthBiasEnable(depthBiasEnable_), depthBiasConstantFactor(depthBiasConstantFactor_), depthBiasClamp(depthBiasClamp_), depthBiasSlopeFactor(depthBiasSlopeFactor_), lineWidth(lineWidth_) { } VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(PipelineRasterizationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateCreateInfo(VkPipelineRasterizationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRasterizationStateCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRasterizationStateCreateInfo &operator=(PipelineRasterizationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateCreateInfo &operator=(VkPipelineRasterizationStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setDepthClampEnable(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_) VULKAN_HPP_NOEXCEPT { depthClampEnable = depthClampEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_) VULKAN_HPP_NOEXCEPT { rasterizerDiscardEnable = rasterizerDiscardEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setPolygonMode(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_) VULKAN_HPP_NOEXCEPT { polygonMode = polygonMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_) VULKAN_HPP_NOEXCEPT { cullMode = cullMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace_) VULKAN_HPP_NOEXCEPT { frontFace = frontFace_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_) VULKAN_HPP_NOEXCEPT { depthBiasEnable = depthBiasEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setDepthBiasConstantFactor(float depthBiasConstantFactor_) VULKAN_HPP_NOEXCEPT { depthBiasConstantFactor = depthBiasConstantFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setDepthBiasClamp(float depthBiasClamp_) VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setDepthBiasSlopeFactor(float depthBiasSlopeFactor_) VULKAN_HPP_NOEXCEPT { depthBiasSlopeFactor = depthBiasSlopeFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &setLineWidth(float lineWidth_) VULKAN_HPP_NOEXCEPT { lineWidth = lineWidth_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, depthClampEnable, rasterizerDiscardEnable, polygonMode, cullMode, frontFace, depthBiasEnable, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor, lineWidth); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRasterizationStateCreateInfo const &) const = default; #else bool operator==(PipelineRasterizationStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (depthClampEnable == rhs.depthClampEnable) && (rasterizerDiscardEnable == rhs.rasterizerDiscardEnable) && (polygonMode == rhs.polygonMode) && (cullMode == rhs.cullMode) && (frontFace == rhs.frontFace) && (depthBiasEnable == rhs.depthBiasEnable) && (depthBiasConstantFactor == rhs.depthBiasConstantFactor) && (depthBiasClamp == rhs.depthBiasClamp) && (depthBiasSlopeFactor == rhs.depthBiasSlopeFactor) && (lineWidth == rhs.lineWidth); # endif } bool operator!=(PipelineRasterizationStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; float depthBiasConstantFactor = {}; float depthBiasClamp = {}; float depthBiasSlopeFactor = {}; float lineWidth = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo) == sizeof(VkPipelineRasterizationStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRasterizationStateCreateInfo; }; struct PipelineMultisampleStateCreateInfo { using NativeType = VkPipelineMultisampleStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask *pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), rasterizationSamples(rasterizationSamples_), sampleShadingEnable(sampleShadingEnable_), 
minSampleShading(minSampleShading_), pSampleMask(pSampleMask_), alphaToCoverageEnable(alphaToCoverageEnable_), alphaToOneEnable(alphaToOneEnable_) { } VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(PipelineMultisampleStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineMultisampleStateCreateInfo(VkPipelineMultisampleStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineMultisampleStateCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineMultisampleStateCreateInfo &operator=(PipelineMultisampleStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineMultisampleStateCreateInfo &operator=(VkPipelineMultisampleStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setRasterizationSamples(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_) VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &setSampleShadingEnable(VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_) VULKAN_HPP_NOEXCEPT { sampleShadingEnable = sampleShadingEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &setMinSampleShading(float minSampleShading_) VULKAN_HPP_NOEXCEPT { minSampleShading = minSampleShading_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &setPSampleMask(const VULKAN_HPP_NAMESPACE::SampleMask *pSampleMask_) VULKAN_HPP_NOEXCEPT { pSampleMask = pSampleMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_) VULKAN_HPP_NOEXCEPT { alphaToCoverageEnable = alphaToCoverageEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &setAlphaToOneEnable(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_) VULKAN_HPP_NOEXCEPT { alphaToOneEnable = alphaToOneEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineMultisampleStateCreateInfo const &) const = default; #else bool operator==(PipelineMultisampleStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (rasterizationSamples == rhs.rasterizationSamples) && (sampleShadingEnable == rhs.sampleShadingEnable) && (minSampleShading == rhs.minSampleShading) && (pSampleMask 
== rhs.pSampleMask) && (alphaToCoverageEnable == rhs.alphaToCoverageEnable) && (alphaToOneEnable == rhs.alphaToOneEnable); # endif } bool operator!=(PipelineMultisampleStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; float minSampleShading = {}; const VULKAN_HPP_NAMESPACE::SampleMask *pSampleMask = {}; VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo) == sizeof(VkPipelineMultisampleStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineMultisampleStateCreateInfo; }; struct StencilOpState { using NativeType = VkStencilOpState; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT : failOp(failOp_), passOp(passOp_), depthFailOp(depthFailOp_), compareOp(compareOp_), compareMask(compareMask_), writeMask(writeMask_), reference(reference_) { } VULKAN_HPP_CONSTEXPR StencilOpState(StencilOpState const &rhs) VULKAN_HPP_NOEXCEPT = default; StencilOpState(VkStencilOpState const &rhs) VULKAN_HPP_NOEXCEPT : StencilOpState(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ StencilOpState &operator=(StencilOpState const &rhs) VULKAN_HPP_NOEXCEPT = default; StencilOpState &operator=(VkStencilOpState const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 StencilOpState &setFailOp(VULKAN_HPP_NAMESPACE::StencilOp failOp_) VULKAN_HPP_NOEXCEPT { failOp = failOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState &setPassOp(VULKAN_HPP_NAMESPACE::StencilOp passOp_) VULKAN_HPP_NOEXCEPT { passOp = passOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState &setDepthFailOp(VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_) VULKAN_HPP_NOEXCEPT { depthFailOp = depthFailOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState &setCompareOp(VULKAN_HPP_NAMESPACE::CompareOp compareOp_) VULKAN_HPP_NOEXCEPT { compareOp = compareOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState &setCompareMask(uint32_t compareMask_) VULKAN_HPP_NOEXCEPT { compareMask = compareMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState &setWriteMask(uint32_t writeMask_) VULKAN_HPP_NOEXCEPT { writeMask = writeMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState &setReference(uint32_t reference_) 
VULKAN_HPP_NOEXCEPT { reference = reference_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(StencilOpState const &) const = default; #else bool operator==(StencilOpState const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (failOp == rhs.failOp) && (passOp == rhs.passOp) && (depthFailOp == rhs.depthFailOp) && (compareOp == rhs.compareOp) && (compareMask == rhs.compareMask) && (writeMask == rhs.writeMask) && (reference == rhs.reference); # endif } bool operator!=(StencilOpState const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; uint32_t compareMask = {}; uint32_t writeMask = {}; uint32_t reference = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::StencilOpState) == sizeof(VkStencilOpState), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "StencilOpState is not nothrow_move_constructible!"); struct PipelineDepthStencilStateCreateInfo { using NativeType = VkPipelineDepthStencilStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), depthTestEnable(depthTestEnable_), depthWriteEnable(depthWriteEnable_), depthCompareOp(depthCompareOp_), depthBoundsTestEnable(depthBoundsTestEnable_), stencilTestEnable(stencilTestEnable_), front(front_), back(back_), minDepthBounds(minDepthBounds_), maxDepthBounds(maxDepthBounds_) { } VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(PipelineDepthStencilStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineDepthStencilStateCreateInfo(VkPipelineDepthStencilStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineDepthStencilStateCreateInfo(*reinterpret_cast(&rhs)) { } #endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineDepthStencilStateCreateInfo &operator=(PipelineDepthStencilStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineDepthStencilStateCreateInfo &operator=(VkPipelineDepthStencilStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFlags(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_) VULKAN_HPP_NOEXCEPT { depthTestEnable = depthTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_) VULKAN_HPP_NOEXCEPT { depthWriteEnable = depthWriteEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_) VULKAN_HPP_NOEXCEPT { depthCompareOp = depthCompareOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_) VULKAN_HPP_NOEXCEPT { depthBoundsTestEnable = depthBoundsTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_) VULKAN_HPP_NOEXCEPT { stencilTestEnable = stencilTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setFront(VULKAN_HPP_NAMESPACE::StencilOpState const &front_) VULKAN_HPP_NOEXCEPT { front = front_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setBack(VULKAN_HPP_NAMESPACE::StencilOpState const &back_) VULKAN_HPP_NOEXCEPT { back = back_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setMinDepthBounds(float minDepthBounds_) VULKAN_HPP_NOEXCEPT { minDepthBounds = minDepthBounds_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &setMaxDepthBounds(float maxDepthBounds_) VULKAN_HPP_NOEXCEPT { maxDepthBounds = maxDepthBounds_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, depthTestEnable, depthWriteEnable, depthCompareOp, depthBoundsTestEnable, stencilTestEnable, front, back, minDepthBounds, maxDepthBounds); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineDepthStencilStateCreateInfo const &) const = default; #else bool operator==(PipelineDepthStencilStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (depthTestEnable == rhs.depthTestEnable) && (depthWriteEnable == rhs.depthWriteEnable) && (depthCompareOp == 
rhs.depthCompareOp) && (depthBoundsTestEnable == rhs.depthBoundsTestEnable) && (stencilTestEnable == rhs.stencilTestEnable) && (front == rhs.front) && (back == rhs.back) && (minDepthBounds == rhs.minDepthBounds) && (maxDepthBounds == rhs.maxDepthBounds); # endif } bool operator!=(PipelineDepthStencilStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; VULKAN_HPP_NAMESPACE::StencilOpState front = {}; VULKAN_HPP_NAMESPACE::StencilOpState back = {}; float minDepthBounds = {}; float maxDepthBounds = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo) == sizeof(VkPipelineDepthStencilStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineDepthStencilStateCreateInfo; }; struct PipelineColorBlendAttachmentState { using NativeType = VkPipelineColorBlendAttachmentState; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT : blendEnable(blendEnable_), srcColorBlendFactor(srcColorBlendFactor_), dstColorBlendFactor(dstColorBlendFactor_), colorBlendOp(colorBlendOp_), srcAlphaBlendFactor(srcAlphaBlendFactor_), dstAlphaBlendFactor(dstAlphaBlendFactor_), alphaBlendOp(alphaBlendOp_), colorWriteMask(colorWriteMask_) { } VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(PipelineColorBlendAttachmentState const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendAttachmentState(VkPipelineColorBlendAttachmentState const &rhs) VULKAN_HPP_NOEXCEPT : PipelineColorBlendAttachmentState(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineColorBlendAttachmentState &operator=(PipelineColorBlendAttachmentState const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendAttachmentState &operator=(VkPipelineColorBlendAttachmentState const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &setBlendEnable(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_) 
VULKAN_HPP_NOEXCEPT { blendEnable = blendEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcColorBlendFactor(VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_) VULKAN_HPP_NOEXCEPT { srcColorBlendFactor = srcColorBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstColorBlendFactor(VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_) VULKAN_HPP_NOEXCEPT { dstColorBlendFactor = dstColorBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &setColorBlendOp(VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_) VULKAN_HPP_NOEXCEPT { colorBlendOp = colorBlendOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor(VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_) VULKAN_HPP_NOEXCEPT { srcAlphaBlendFactor = srcAlphaBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstAlphaBlendFactor(VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_) VULKAN_HPP_NOEXCEPT { dstAlphaBlendFactor = dstAlphaBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &setAlphaBlendOp(VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_) VULKAN_HPP_NOEXCEPT { alphaBlendOp = alphaBlendOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorWriteMask(VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_) VULKAN_HPP_NOEXCEPT { colorWriteMask = colorWriteMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineColorBlendAttachmentState const &) const = default; #else bool operator==(PipelineColorBlendAttachmentState const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (blendEnable == rhs.blendEnable) && (srcColorBlendFactor == rhs.srcColorBlendFactor) && (dstColorBlendFactor == rhs.dstColorBlendFactor) && (colorBlendOp == rhs.colorBlendOp) && (srcAlphaBlendFactor == rhs.srcAlphaBlendFactor) && (dstAlphaBlendFactor == rhs.dstAlphaBlendFactor) && (alphaBlendOp == rhs.alphaBlendOp) && (colorWriteMask == rhs.colorWriteMask); # endif } bool operator!=(PipelineColorBlendAttachmentState const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; 
VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState) == sizeof(VkPipelineColorBlendAttachmentState), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineColorBlendAttachmentState is not nothrow_move_constructible!"); struct PipelineColorBlendStateCreateInfo { using NativeType = VkPipelineColorBlendStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState *pAttachments_ = {}, std::array const &blendConstants_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), logicOpEnable(logicOpEnable_), logicOp(logicOp_), attachmentCount(attachmentCount_), pAttachments(pAttachments_), blendConstants(blendConstants_) { } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(PipelineColorBlendStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendStateCreateInfo(VkPipelineColorBlendStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineColorBlendStateCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_, std::array const &blendConstants_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , logicOpEnable(logicOpEnable_) , logicOp(logicOp_) , attachmentCount(static_cast(attachments_.size())) , pAttachments(attachments_.data()) , blendConstants(blendConstants_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineColorBlendStateCreateInfo &operator=(PipelineColorBlendStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendStateCreateInfo &operator=(VkPipelineColorBlendStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &setLogicOpEnable(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_) VULKAN_HPP_NOEXCEPT { logicOpEnable = logicOpEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &setLogicOp(VULKAN_HPP_NAMESPACE::LogicOp logicOp_) VULKAN_HPP_NOEXCEPT { logicOp = logicOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &setAttachmentCount(uint32_t attachmentCount_) 
VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments(const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState *pAttachments_) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineColorBlendStateCreateInfo &setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast(attachments_.size()); pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &setBlendConstants(std::array blendConstants_) VULKAN_HPP_NOEXCEPT { blendConstants = blendConstants_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineColorBlendStateCreateInfo const &) const = default; #else bool operator==(PipelineColorBlendStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (logicOpEnable == rhs.logicOpEnable) && (logicOp == rhs.logicOp) && (attachmentCount == rhs.attachmentCount) && (pAttachments == rhs.pAttachments) && (blendConstants == rhs.blendConstants); # endif } bool operator!=(PipelineColorBlendStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState *pAttachments = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo) == sizeof(VkPipelineColorBlendStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineColorBlendStateCreateInfo; }; struct PipelineDynamicStateCreateInfo { using NativeType = VkPipelineDynamicStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const 
VULKAN_HPP_NAMESPACE::DynamicState *pDynamicStates_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), dynamicStateCount(dynamicStateCount_), pDynamicStates(pDynamicStates_) { } VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(PipelineDynamicStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineDynamicStateCreateInfo(VkPipelineDynamicStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineDynamicStateCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &dynamicStates_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , dynamicStateCount(static_cast(dynamicStates_.size())) , pDynamicStates(dynamicStates_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineDynamicStateCreateInfo &operator=(PipelineDynamicStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineDynamicStateCreateInfo &operator=(VkPipelineDynamicStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &setDynamicStateCount(uint32_t dynamicStateCount_) VULKAN_HPP_NOEXCEPT { dynamicStateCount = dynamicStateCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &setPDynamicStates(const VULKAN_HPP_NAMESPACE::DynamicState *pDynamicStates_) VULKAN_HPP_NOEXCEPT { pDynamicStates = pDynamicStates_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineDynamicStateCreateInfo & setDynamicStates(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &dynamicStates_) VULKAN_HPP_NOEXCEPT { dynamicStateCount = static_cast(dynamicStates_.size()); pDynamicStates = dynamicStates_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, dynamicStateCount, pDynamicStates); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineDynamicStateCreateInfo const &) const = default; #else bool operator==(PipelineDynamicStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (dynamicStateCount == rhs.dynamicStateCount) && (pDynamicStates == rhs.pDynamicStates); # endif } bool operator!=(PipelineDynamicStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; const void *pNext = {}; 
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; uint32_t dynamicStateCount = {}; const VULKAN_HPP_NAMESPACE::DynamicState *pDynamicStates = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo) == sizeof(VkPipelineDynamicStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineDynamicStateCreateInfo; }; struct GraphicsPipelineCreateInfo { using NativeType = VkGraphicsPipelineCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo *pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo *pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), stageCount(stageCount_), pStages(pStages_), pVertexInputState(pVertexInputState_), pInputAssemblyState(pInputAssemblyState_), pTessellationState(pTessellationState_), pViewportState(pViewportState_), pRasterizationState(pRasterizationState_), pMultisampleState(pMultisampleState_), pDepthStencilState(pDepthStencilState_), pColorBlendState(pColorBlendState_), pDynamicState(pDynamicState_), layout(layout_), renderPass(renderPass_), subpass(subpass_), basePipelineHandle(basePipelineHandle_), basePipelineIndex(basePipelineIndex_) { } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(GraphicsPipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : GraphicsPipelineCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo *pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState_ = {}, const 
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo *pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , stageCount(static_cast(stages_.size())) , pStages(stages_.data()) , pVertexInputState(pVertexInputState_) , pInputAssemblyState(pInputAssemblyState_) , pTessellationState(pTessellationState_) , pViewportState(pViewportState_) , pRasterizationState(pRasterizationState_) , pMultisampleState(pMultisampleState_) , pDepthStencilState(pDepthStencilState_) , pColorBlendState(pColorBlendState_) , pDynamicState(pDynamicState_) , layout(layout_) , renderPass(renderPass_) , subpass(subpass_) , basePipelineHandle(basePipelineHandle_) , basePipelineIndex(basePipelineIndex_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GraphicsPipelineCreateInfo &operator=(GraphicsPipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineCreateInfo &operator=(VkGraphicsPipelineCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setStageCount(uint32_t stageCount_) VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setPStages(const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GraphicsPipelineCreateInfo & setStages(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_) VULKAN_HPP_NOEXCEPT { stageCount = static_cast(stages_.size()); pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState(const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState_) VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState(const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo *pInputAssemblyState_) VULKAN_HPP_NOEXCEPT { pInputAssemblyState = pInputAssemblyState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState(const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState_) VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState(const 
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *pViewportState_) VULKAN_HPP_NOEXCEPT { pViewportState = pViewportState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState(const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo *pRasterizationState_) VULKAN_HPP_NOEXCEPT { pRasterizationState = pRasterizationState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState(const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *pMultisampleState_) VULKAN_HPP_NOEXCEPT { pMultisampleState = pMultisampleState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState(const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *pDepthStencilState_) VULKAN_HPP_NOEXCEPT { pDepthStencilState = pDepthStencilState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState(const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *pColorBlendState_) VULKAN_HPP_NOEXCEPT { pColorBlendState = pColorBlendState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState(const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState_) VULKAN_HPP_NOEXCEPT { pDynamicState = pDynamicState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setLayout(VULKAN_HPP_NAMESPACE::PipelineLayout layout_) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass_) VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setSubpass(uint32_t subpass_) VULKAN_HPP_NOEXCEPT { subpass = subpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setBasePipelineHandle(VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &setBasePipelineIndex(int32_t basePipelineIndex_) VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, stageCount, pStages, pVertexInputState, pInputAssemblyState, pTessellationState, pViewportState, pRasterizationState, pMultisampleState, pDepthStencilState, pColorBlendState, pDynamicState, layout, renderPass, subpass, basePipelineHandle, basePipelineIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GraphicsPipelineCreateInfo const &) const = default; #else bool operator==(GraphicsPipelineCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (stageCount == rhs.stageCount) && (pStages == rhs.pStages) && (pVertexInputState == rhs.pVertexInputState) && (pInputAssemblyState == rhs.pInputAssemblyState) && (pTessellationState == rhs.pTessellationState) && (pViewportState == rhs.pViewportState) && (pRasterizationState == 
rhs.pRasterizationState) && (pMultisampleState == rhs.pMultisampleState) && (pDepthStencilState == rhs.pDepthStencilState) && (pColorBlendState == rhs.pColorBlendState) && (pDynamicState == rhs.pDynamicState) && (layout == rhs.layout) && (renderPass == rhs.renderPass) && (subpass == rhs.subpass) && (basePipelineHandle == rhs.basePipelineHandle) && (basePipelineIndex == rhs.basePipelineIndex); # endif } bool operator!=(GraphicsPipelineCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; uint32_t stageCount = {}; const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages = {}; const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState = {}; const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo *pInputAssemblyState = {}; const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState = {}; const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *pViewportState = {}; const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo *pRasterizationState = {}; const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *pMultisampleState = {}; const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *pDepthStencilState = {}; const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *pColorBlendState = {}; const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; uint32_t subpass = {}; VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo) == sizeof(VkGraphicsPipelineCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GraphicsPipelineCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = GraphicsPipelineCreateInfo; }; struct GraphicsPipelineLibraryCreateInfoEXT { using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT(VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT(GraphicsPipelineLibraryCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineLibraryCreateInfoEXT(VkGraphicsPipelineLibraryCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : GraphicsPipelineLibraryCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GraphicsPipelineLibraryCreateInfoEXT &operator=(GraphicsPipelineLibraryCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineLibraryCreateInfoEXT &operator=(VkGraphicsPipelineLibraryCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT 
&setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GraphicsPipelineLibraryCreateInfoEXT const &) const = default; #else bool operator==(GraphicsPipelineLibraryCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(GraphicsPipelineLibraryCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT) == sizeof(VkGraphicsPipelineLibraryCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = GraphicsPipelineLibraryCreateInfoEXT; }; struct GraphicsShaderGroupCreateInfoNV { using NativeType = VkGraphicsShaderGroupCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stageCount(stageCount_), pStages(pStages_), pVertexInputState(pVertexInputState_), pTessellationState(pTessellationState_) { } VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(GraphicsShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsShaderGroupCreateInfoNV(VkGraphicsShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : GraphicsShaderGroupCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GraphicsShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , stageCount(static_cast(stages_.size())) , pStages(stages_.data()) , 
pVertexInputState(pVertexInputState_) , pTessellationState(pTessellationState_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GraphicsShaderGroupCreateInfoNV &operator=(GraphicsShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsShaderGroupCreateInfoNV &operator=(VkGraphicsShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &setStageCount(uint32_t stageCount_) VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPStages(const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GraphicsShaderGroupCreateInfoNV & setStages(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_) VULKAN_HPP_NOEXCEPT { stageCount = static_cast(stages_.size()); pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState(const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState_) VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState(const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState_) VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GraphicsShaderGroupCreateInfoNV const &) const = default; #else bool operator==(GraphicsShaderGroupCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stageCount == rhs.stageCount) && (pStages == rhs.pStages) && (pVertexInputState == rhs.pVertexInputState) && (pTessellationState == rhs.pTessellationState); # endif } bool operator!=(GraphicsShaderGroupCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; const void *pNext = {}; uint32_t stageCount = {}; const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages = {}; const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *pVertexInputState = {}; const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *pTessellationState = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV) == sizeof(VkGraphicsShaderGroupCreateInfoNV), "struct and wrapper have different size!"); 
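  // Illustrative sketch (not normative, not part of the generated declarations): one way to fill a
  // GraphicsShaderGroupCreateInfoNV with the fluent setters defined above. The names "vertexStage",
  // "fragmentStage" and "vertexInput" are caller-side placeholders assumed to exist.
  //
  //   std::array<vk::PipelineShaderStageCreateInfo, 2> stages = { vertexStage, fragmentStage };
  //   vk::PipelineVertexInputStateCreateInfo           vertexInput{};
  //
  //   vk::GraphicsShaderGroupCreateInfoNV shaderGroup{};
  //   shaderGroup.setStageCount(static_cast<uint32_t>(stages.size()))
  //              .setPStages(stages.data())
  //              .setPVertexInputState(&vertexInput);
  //
  // When VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined, the ArrayProxyNoTemporaries overloads
  // (the stages_ constructor and setStages) set the count and pointer in a single call instead of
  // the two separate setters used here.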
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = GraphicsShaderGroupCreateInfoNV; }; struct GraphicsPipelineShaderGroupsCreateInfoNV { using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV *pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline *pPipelines_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), groupCount(groupCount_), pGroups(pGroups_), pipelineCount(pipelineCount_), pPipelines(pPipelines_) { } VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(GraphicsPipelineShaderGroupsCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineShaderGroupsCreateInfoNV(VkGraphicsPipelineShaderGroupsCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : GraphicsPipelineShaderGroupsCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pipelines_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , groupCount(static_cast(groups_.size())) , pGroups(groups_.data()) , pipelineCount(static_cast(pipelines_.size())) , pPipelines(pipelines_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GraphicsPipelineShaderGroupsCreateInfoNV &operator=(GraphicsPipelineShaderGroupsCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineShaderGroupsCreateInfoNV &operator=(VkGraphicsPipelineShaderGroupsCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &setGroupCount(uint32_t groupCount_) VULKAN_HPP_NOEXCEPT { groupCount = groupCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups(const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV *pGroups_) VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) GraphicsPipelineShaderGroupsCreateInfoNV &setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &groups_) VULKAN_HPP_NOEXCEPT { groupCount = static_cast(groups_.size()); pGroups = groups_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &setPipelineCount(uint32_t pipelineCount_) VULKAN_HPP_NOEXCEPT { pipelineCount = pipelineCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &setPPipelines(const VULKAN_HPP_NAMESPACE::Pipeline *pPipelines_) VULKAN_HPP_NOEXCEPT { pPipelines = pPipelines_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) 
GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pipelines_) VULKAN_HPP_NOEXCEPT { pipelineCount = static_cast(pipelines_.size()); pPipelines = pipelines_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, groupCount, pGroups, pipelineCount, pPipelines); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(GraphicsPipelineShaderGroupsCreateInfoNV const &) const = default; #else bool operator==(GraphicsPipelineShaderGroupsCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (groupCount == rhs.groupCount) && (pGroups == rhs.pGroups) && (pipelineCount == rhs.pipelineCount) && (pPipelines == rhs.pPipelines); # endif } bool operator!=(GraphicsPipelineShaderGroupsCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; const void *pNext = {}; uint32_t groupCount = {}; const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV *pGroups = {}; uint32_t pipelineCount = {}; const VULKAN_HPP_NAMESPACE::Pipeline *pPipelines = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV) == sizeof(VkGraphicsPipelineShaderGroupsCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = GraphicsPipelineShaderGroupsCreateInfoNV; }; struct XYColorEXT { using NativeType = VkXYColorEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT : x(x_), y(y_) { } VULKAN_HPP_CONSTEXPR XYColorEXT(XYColorEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; XYColorEXT(VkXYColorEXT const &rhs) VULKAN_HPP_NOEXCEPT : XYColorEXT(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ XYColorEXT &operator=(XYColorEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; XYColorEXT &operator=(VkXYColorEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 XYColorEXT &setX(float x_) VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 XYColorEXT &setY(float y_) VULKAN_HPP_NOEXCEPT { y = y_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie(x, y);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(XYColorEXT const &) const = default;
#else
    bool operator==(XYColorEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (x == rhs.x) && (y == rhs.y);
# endif
    }

    bool operator!=(XYColorEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    float x = {};
    float y = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::XYColorEXT) == sizeof(VkXYColorEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value,
                           "XYColorEXT is not nothrow_move_constructible!");

  struct HdrMetadataEXT
  {
    using NativeType = VkHdrMetadataEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
                                        VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
                                        VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
                                        VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
                                        float maxLuminance_ = {},
                                        float minLuminance_ = {},
                                        float maxContentLightLevel_ = {},
                                        float maxFrameAverageLightLevel_ = {},
                                        const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , displayPrimaryRed(displayPrimaryRed_)
      , displayPrimaryGreen(displayPrimaryGreen_)
      , displayPrimaryBlue(displayPrimaryBlue_)
      , whitePoint(whitePoint_)
      , maxLuminance(maxLuminance_)
      , minLuminance(minLuminance_)
      , maxContentLightLevel(maxContentLightLevel_)
      , maxFrameAverageLightLevel(maxFrameAverageLightLevel_)
    {
    }

    VULKAN_HPP_CONSTEXPR HdrMetadataEXT(HdrMetadataEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    HdrMetadataEXT(VkHdrMetadataEXT const &rhs) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT(*reinterpret_cast<HdrMetadataEXT const *>(&rhs)) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    HdrMetadataEXT &operator=(HdrMetadataEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    HdrMetadataEXT &operator=(VkHdrMetadataEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setDisplayPrimaryRed(VULKAN_HPP_NAMESPACE::XYColorEXT const &displayPrimaryRed_) VULKAN_HPP_NOEXCEPT { displayPrimaryRed = displayPrimaryRed_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setDisplayPrimaryGreen(VULKAN_HPP_NAMESPACE::XYColorEXT const &displayPrimaryGreen_) VULKAN_HPP_NOEXCEPT { displayPrimaryGreen = displayPrimaryGreen_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setDisplayPrimaryBlue(VULKAN_HPP_NAMESPACE::XYColorEXT const &displayPrimaryBlue_) VULKAN_HPP_NOEXCEPT { displayPrimaryBlue = displayPrimaryBlue_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setWhitePoint(VULKAN_HPP_NAMESPACE::XYColorEXT const &whitePoint_) VULKAN_HPP_NOEXCEPT { whitePoint = whitePoint_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setMaxLuminance(float maxLuminance_) VULKAN_HPP_NOEXCEPT { maxLuminance = maxLuminance_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setMinLuminance(float minLuminance_) VULKAN_HPP_NOEXCEPT { minLuminance = minLuminance_; return *this; }
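    // Illustrative sketch (not normative): populating HdrMetadataEXT with the setters above, using
    // BT.2020 primaries and a D65 white point expressed as XYColorEXT chromaticities. The luminance
    // numbers are example values only; the filled struct is what vkSetHdrMetadataEXT
    // (VK_EXT_hdr_metadata) consumes for a swapchain.
    //
    //   vk::HdrMetadataEXT metadata{};
    //   metadata.setDisplayPrimaryRed({ 0.708f, 0.292f })
    //           .setDisplayPrimaryGreen({ 0.170f, 0.797f })
    //           .setDisplayPrimaryBlue({ 0.131f, 0.046f })
    //           .setWhitePoint({ 0.3127f, 0.3290f })
    //           .setMaxLuminance(1000.0f)
    //           .setMinLuminance(0.001f);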
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setMaxContentLightLevel(float maxContentLightLevel_) VULKAN_HPP_NOEXCEPT { maxContentLightLevel = maxContentLightLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT &setMaxFrameAverageLightLevel(float maxFrameAverageLightLevel_) VULKAN_HPP_NOEXCEPT { maxFrameAverageLightLevel = maxFrameAverageLightLevel_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, displayPrimaryRed, displayPrimaryGreen, displayPrimaryBlue, whitePoint, maxLuminance, minLuminance, maxContentLightLevel, maxFrameAverageLightLevel); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(HdrMetadataEXT const &) const = default; #else bool operator==(HdrMetadataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (displayPrimaryRed == rhs.displayPrimaryRed) && (displayPrimaryGreen == rhs.displayPrimaryGreen) && (displayPrimaryBlue == rhs.displayPrimaryBlue) && (whitePoint == rhs.whitePoint) && (maxLuminance == rhs.maxLuminance) && (minLuminance == rhs.minLuminance) && (maxContentLightLevel == rhs.maxContentLightLevel) && (maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel); # endif } bool operator!=(HdrMetadataEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; float maxLuminance = {}; float minLuminance = {}; float maxContentLightLevel = {}; float maxFrameAverageLightLevel = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::HdrMetadataEXT) == sizeof(VkHdrMetadataEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "HdrMetadataEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = HdrMetadataEXT; }; struct HeadlessSurfaceCreateInfoEXT { using NativeType = VkHeadlessSurfaceCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(HeadlessSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; HeadlessSurfaceCreateInfoEXT(VkHeadlessSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : HeadlessSurfaceCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ HeadlessSurfaceCreateInfoEXT &operator=(HeadlessSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = 
default; HeadlessSurfaceCreateInfoEXT &operator=(VkHeadlessSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(HeadlessSurfaceCreateInfoEXT const &) const = default; #else bool operator==(HeadlessSurfaceCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(HeadlessSurfaceCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT) == sizeof(VkHeadlessSurfaceCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = HeadlessSurfaceCreateInfoEXT; }; #if defined(VK_USE_PLATFORM_IOS_MVK) struct IOSSurfaceCreateInfoMVK { using NativeType = VkIOSSurfaceCreateInfoMVK; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void *pView_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pView(pView_) { } VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(IOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT = default; IOSSurfaceCreateInfoMVK(VkIOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT : IOSSurfaceCreateInfoMVK(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ IOSSurfaceCreateInfoMVK &operator=(IOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT = default; IOSSurfaceCreateInfoMVK &operator=(VkIOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK &setFlags(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_) VULKAN_HPP_NOEXCEPT { flags = 
flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK &setPView(const void *pView_) VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pView); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(IOSSurfaceCreateInfoMVK const &) const = default; # else bool operator==(IOSSurfaceCreateInfoMVK const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pView == rhs.pView); # endif } bool operator!=(IOSSurfaceCreateInfoMVK const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; const void *pNext = {}; VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; const void *pView = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK) == sizeof(VkIOSSurfaceCreateInfoMVK), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!"); template<> struct CppType { using Type = IOSSurfaceCreateInfoMVK; }; #endif /*VK_USE_PLATFORM_IOS_MVK*/ struct ImageBlit { using NativeType = VkImageBlit; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array const &srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const &dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT : srcSubresource(srcSubresource_), srcOffsets(srcOffsets_), dstSubresource(dstSubresource_), dstOffsets(dstOffsets_) { } VULKAN_HPP_CONSTEXPR_14 ImageBlit(ImageBlit const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageBlit(VkImageBlit const &rhs) VULKAN_HPP_NOEXCEPT : ImageBlit(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageBlit &operator=(ImageBlit const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageBlit &operator=(VkImageBlit const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageBlit &setSrcSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &srcSubresource_) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit &setSrcOffsets(std::array const &srcOffsets_) VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit &setDstSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &dstSubresource_) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit &setDstOffsets(std::array const &dstOffsets_) VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT { 
return *reinterpret_cast(this); } explicit operator VkImageBlit &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(srcSubresource, srcOffsets, dstSubresource, dstOffsets); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageBlit const &) const = default; #else bool operator==(ImageBlit const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (srcSubresource == rhs.srcSubresource) && (srcOffsets == rhs.srcOffsets) && (dstSubresource == rhs.dstSubresource) && (dstOffsets == rhs.dstOffsets); # endif } bool operator!=(ImageBlit const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageBlit) == sizeof(VkImageBlit), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageBlit is not nothrow_move_constructible!"); #if defined(VK_USE_PLATFORM_FUCHSIA) struct ImageFormatConstraintsInfoFUCHSIA { using NativeType = VkImageFormatConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, uint64_t sysmemPixelFormat_ = {}, uint32_t colorSpaceCount_ = {}, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA *pColorSpaces_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageCreateInfo(imageCreateInfo_), requiredFormatFeatures(requiredFormatFeatures_), flags(flags_), sysmemPixelFormat(sysmemPixelFormat_), colorSpaceCount(colorSpaceCount_), pColorSpaces(pColorSpaces_) { } VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA(ImageFormatConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatConstraintsInfoFUCHSIA(VkImageFormatConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : ImageFormatConstraintsInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageFormatConstraintsInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, uint64_t sysmemPixelFormat_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorSpaces_, const void *pNext_ = nullptr) : pNext(pNext_) , imageCreateInfo(imageCreateInfo_) , requiredFormatFeatures(requiredFormatFeatures_) , flags(flags_) , sysmemPixelFormat(sysmemPixelFormat_) , colorSpaceCount(static_cast(colorSpaces_.size())) , pColorSpaces(colorSpaces_.data()) { } # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageFormatConstraintsInfoFUCHSIA &operator=(ImageFormatConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatConstraintsInfoFUCHSIA &operator=(VkImageFormatConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateInfo const &imageCreateInfo_) VULKAN_HPP_NOEXCEPT { imageCreateInfo = imageCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setRequiredFormatFeatures(VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_) VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &setFlags(VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &setSysmemPixelFormat(uint64_t sysmemPixelFormat_) VULKAN_HPP_NOEXCEPT { sysmemPixelFormat = sysmemPixelFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA &setColorSpaceCount(uint32_t colorSpaceCount_) VULKAN_HPP_NOEXCEPT { colorSpaceCount = colorSpaceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPColorSpaces(const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA *pColorSpaces_) VULKAN_HPP_NOEXCEPT { pColorSpaces = pColorSpaces_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageFormatConstraintsInfoFUCHSIA &setColorSpaces( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorSpaces_) VULKAN_HPP_NOEXCEPT { colorSpaceCount = static_cast(colorSpaces_.size()); pColorSpaces = colorSpaces_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageFormatConstraintsInfoFUCHSIA const &) const = default; # else bool operator==(ImageFormatConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageCreateInfo == rhs.imageCreateInfo) && (requiredFormatFeatures == rhs.requiredFormatFeatures) && (flags == rhs.flags) && (sysmemPixelFormat == rhs.sysmemPixelFormat) && (colorSpaceCount == rhs.colorSpaceCount) && (pColorSpaces == rhs.pColorSpaces); # endif } bool operator!=(ImageFormatConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; const void *pNext = {}; 
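    // Illustrative sketch (not normative): when VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined,
    // the ArrayProxyNoTemporaries constructor above derives colorSpaceCount / pColorSpaces from a
    // caller-owned container, e.g.
    //
    //   std::vector<vk::SysmemColorSpaceFUCHSIA> colorSpaces = { /* ... */ };
    //   vk::ImageFormatConstraintsInfoFUCHSIA formatConstraints(imageCreateInfo,
    //                                                           requiredFormatFeatures,
    //                                                           {} /* flags */,
    //                                                           0 /* sysmemPixelFormat */,
    //                                                           colorSpaces);
    //
    // "imageCreateInfo" and "requiredFormatFeatures" are placeholders assumed to exist on the caller
    // side; the resulting struct is typically referenced from ImageConstraintsInfoFUCHSIA::pFormatConstraints,
    // declared below.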
VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {}; VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {}; uint64_t sysmemPixelFormat = {}; uint32_t colorSpaceCount = {}; const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA *pColorSpaces = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA) == sizeof(VkImageFormatConstraintsInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageFormatConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct ImageConstraintsInfoFUCHSIA { using NativeType = VkImageConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA(uint32_t formatConstraintsCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA *pFormatConstraints_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), formatConstraintsCount(formatConstraintsCount_), pFormatConstraints(pFormatConstraints_), bufferCollectionConstraints(bufferCollectionConstraints_), flags(flags_) { } VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA(ImageConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageConstraintsInfoFUCHSIA(VkImageConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : ImageConstraintsInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &formatConstraints_, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , formatConstraintsCount(static_cast(formatConstraints_.size())) , pFormatConstraints(formatConstraints_.data()) , bufferCollectionConstraints(bufferCollectionConstraints_) , flags(flags_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageConstraintsInfoFUCHSIA &operator=(ImageConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageConstraintsInfoFUCHSIA &operator=(VkImageConstraintsInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &setFormatConstraintsCount(uint32_t formatConstraintsCount_) VULKAN_HPP_NOEXCEPT { formatConstraintsCount = formatConstraintsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints(const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA *pFormatConstraints_) VULKAN_HPP_NOEXCEPT { pFormatConstraints = 
pFormatConstraints_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageConstraintsInfoFUCHSIA &setFormatConstraints( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &formatConstraints_) VULKAN_HPP_NOEXCEPT { formatConstraintsCount = static_cast(formatConstraints_.size()); pFormatConstraints = formatConstraints_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints(VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const &bufferCollectionConstraints_) VULKAN_HPP_NOEXCEPT { bufferCollectionConstraints = bufferCollectionConstraints_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA &setFlags(VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageConstraintsInfoFUCHSIA const &) const = default; # else bool operator==(ImageConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (formatConstraintsCount == rhs.formatConstraintsCount) && (pFormatConstraints == rhs.pFormatConstraints) && (bufferCollectionConstraints == rhs.bufferCollectionConstraints) && (flags == rhs.flags); # endif } bool operator!=(ImageConstraintsInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; const void *pNext = {}; uint32_t formatConstraintsCount = {}; const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA *pFormatConstraints = {}; VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA) == sizeof(VkImageConstraintsInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct ImageCopy { using NativeType = VkImageCopy; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT : srcSubresource(srcSubresource_), srcOffset(srcOffset_), dstSubresource(dstSubresource_), dstOffset(dstOffset_), extent(extent_) { 
} VULKAN_HPP_CONSTEXPR ImageCopy(ImageCopy const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageCopy(VkImageCopy const &rhs) VULKAN_HPP_NOEXCEPT : ImageCopy(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageCopy &operator=(ImageCopy const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageCopy &operator=(VkImageCopy const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageCopy &setSrcSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &srcSubresource_) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy &setSrcOffset(VULKAN_HPP_NAMESPACE::Offset3D const &srcOffset_) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy &setDstSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &dstSubresource_) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy &setDstOffset(VULKAN_HPP_NAMESPACE::Offset3D const &dstOffset_) VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy &setExtent(VULKAN_HPP_NAMESPACE::Extent3D const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(srcSubresource, srcOffset, dstSubresource, dstOffset, extent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageCopy const &) const = default; #else bool operator==(ImageCopy const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (srcSubresource == rhs.srcSubresource) && (srcOffset == rhs.srcOffset) && (dstSubresource == rhs.dstSubresource) && (dstOffset == rhs.dstOffset) && (extent == rhs.extent); # endif } bool operator!=(ImageCopy const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageCopy) == sizeof(VkImageCopy), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageCopy is not nothrow_move_constructible!"); struct SubresourceLayout { using NativeType = VkSubresourceLayout; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT : offset(offset_), size(size_), rowPitch(rowPitch_), arrayPitch(arrayPitch_), depthPitch(depthPitch_) { } VULKAN_HPP_CONSTEXPR SubresourceLayout(SubresourceLayout const &rhs) 
VULKAN_HPP_NOEXCEPT = default; SubresourceLayout(VkSubresourceLayout const &rhs) VULKAN_HPP_NOEXCEPT : SubresourceLayout(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubresourceLayout &operator=(SubresourceLayout const &rhs) VULKAN_HPP_NOEXCEPT = default; SubresourceLayout &operator=(VkSubresourceLayout const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(offset, size, rowPitch, arrayPitch, depthPitch); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubresourceLayout const &) const = default; #else bool operator==(SubresourceLayout const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (offset == rhs.offset) && (size == rhs.size) && (rowPitch == rhs.rowPitch) && (arrayPitch == rhs.arrayPitch) && (depthPitch == rhs.depthPitch); # endif } bool operator!=(SubresourceLayout const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubresourceLayout) == sizeof(VkSubresourceLayout), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubresourceLayout is not nothrow_move_constructible!"); struct ImageDrmFormatModifierExplicitCreateInfoEXT { using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout *pPlaneLayouts_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), drmFormatModifier(drmFormatModifier_), drmFormatModifierPlaneCount(drmFormatModifierPlaneCount_), pPlaneLayouts(pPlaneLayouts_) { } VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(ImageDrmFormatModifierExplicitCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierExplicitCreateInfoEXT(VkImageDrmFormatModifierExplicitCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ImageDrmFormatModifierExplicitCreateInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &planeLayouts_, const void *pNext_ = nullptr) : pNext(pNext_) , drmFormatModifier(drmFormatModifier_) , drmFormatModifierPlaneCount(static_cast(planeLayouts_.size())) , pPlaneLayouts(planeLayouts_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageDrmFormatModifierExplicitCreateInfoEXT &operator=(ImageDrmFormatModifierExplicitCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierExplicitCreateInfoEXT &operator=(VkImageDrmFormatModifierExplicitCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &setDrmFormatModifier(uint64_t drmFormatModifier_) VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount(uint32_t drmFormatModifierPlaneCount_) VULKAN_HPP_NOEXCEPT { drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts(const VULKAN_HPP_NAMESPACE::SubresourceLayout *pPlaneLayouts_) VULKAN_HPP_NOEXCEPT { pPlaneLayouts = pPlaneLayouts_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &planeLayouts_) VULKAN_HPP_NOEXCEPT { drmFormatModifierPlaneCount = static_cast(planeLayouts_.size()); pPlaneLayouts = planeLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageDrmFormatModifierExplicitCreateInfoEXT const &) const = default; #else bool operator==(ImageDrmFormatModifierExplicitCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (drmFormatModifier == rhs.drmFormatModifier) && (drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount) && (pPlaneLayouts == rhs.pPlaneLayouts); # endif } bool operator!=(ImageDrmFormatModifierExplicitCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; const void *pNext = {}; uint64_t drmFormatModifier = {}; uint32_t drmFormatModifierPlaneCount = {}; const VULKAN_HPP_NAMESPACE::SubresourceLayout *pPlaneLayouts = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT) == sizeof(VkImageDrmFormatModifierExplicitCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!"); template<> struct 
CppType { using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; }; struct ImageDrmFormatModifierListCreateInfoEXT { using NativeType = VkImageDrmFormatModifierListCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t *pDrmFormatModifiers_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), drmFormatModifierCount(drmFormatModifierCount_), pDrmFormatModifiers(pDrmFormatModifiers_) { } VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(ImageDrmFormatModifierListCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierListCreateInfoEXT(VkImageDrmFormatModifierListCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ImageDrmFormatModifierListCreateInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageDrmFormatModifierListCreateInfoEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &drmFormatModifiers_, const void *pNext_ = nullptr) : pNext(pNext_) , drmFormatModifierCount(static_cast(drmFormatModifiers_.size())) , pDrmFormatModifiers(drmFormatModifiers_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageDrmFormatModifierListCreateInfoEXT &operator=(ImageDrmFormatModifierListCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierListCreateInfoEXT &operator=(VkImageDrmFormatModifierListCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &setDrmFormatModifierCount(uint32_t drmFormatModifierCount_) VULKAN_HPP_NOEXCEPT { drmFormatModifierCount = drmFormatModifierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &setPDrmFormatModifiers(const uint64_t *pDrmFormatModifiers_) VULKAN_HPP_NOEXCEPT { pDrmFormatModifiers = pDrmFormatModifiers_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &drmFormatModifiers_) VULKAN_HPP_NOEXCEPT { drmFormatModifierCount = static_cast(drmFormatModifiers_.size()); pDrmFormatModifiers = drmFormatModifiers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, drmFormatModifierCount, pDrmFormatModifiers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageDrmFormatModifierListCreateInfoEXT const &) const = default; #else bool operator==(ImageDrmFormatModifierListCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return 
this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (drmFormatModifierCount == rhs.drmFormatModifierCount) && (pDrmFormatModifiers == rhs.pDrmFormatModifiers); # endif } bool operator!=(ImageDrmFormatModifierListCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; const void *pNext = {}; uint32_t drmFormatModifierCount = {}; const uint64_t *pDrmFormatModifiers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT) == sizeof(VkImageDrmFormatModifierListCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageDrmFormatModifierListCreateInfoEXT; }; struct ImageDrmFormatModifierPropertiesEXT { using NativeType = VkImageDrmFormatModifierPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), drmFormatModifier(drmFormatModifier_) { } VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(ImageDrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierPropertiesEXT(VkImageDrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : ImageDrmFormatModifierPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageDrmFormatModifierPropertiesEXT &operator=(ImageDrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierPropertiesEXT &operator=(VkImageDrmFormatModifierPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, drmFormatModifier); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageDrmFormatModifierPropertiesEXT const &) const = default; #else bool operator==(ImageDrmFormatModifierPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (drmFormatModifier == rhs.drmFormatModifier); # endif } bool operator!=(ImageDrmFormatModifierPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; void *pNext = {}; uint64_t drmFormatModifier = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT) == sizeof(VkImageDrmFormatModifierPropertiesEXT), "struct and wrapper have 
different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageDrmFormatModifierPropertiesEXT; }; struct ImageFormatListCreateInfo { using NativeType = VkImageFormatListCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format *pViewFormats_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), viewFormatCount(viewFormatCount_), pViewFormats(pViewFormats_) { } VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(ImageFormatListCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatListCreateInfo(VkImageFormatListCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ImageFormatListCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageFormatListCreateInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewFormats_, const void *pNext_ = nullptr) : pNext(pNext_) , viewFormatCount(static_cast(viewFormats_.size())) , pViewFormats(viewFormats_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageFormatListCreateInfo &operator=(ImageFormatListCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatListCreateInfo &operator=(VkImageFormatListCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &setViewFormatCount(uint32_t viewFormatCount_) VULKAN_HPP_NOEXCEPT { viewFormatCount = viewFormatCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &setPViewFormats(const VULKAN_HPP_NAMESPACE::Format *pViewFormats_) VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ImageFormatListCreateInfo & setViewFormats(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewFormats_) VULKAN_HPP_NOEXCEPT { viewFormatCount = static_cast(viewFormats_.size()); pViewFormats = viewFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, viewFormatCount, pViewFormats); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageFormatListCreateInfo const &) const = default; #else bool operator==(ImageFormatListCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (viewFormatCount == rhs.viewFormatCount) && (pViewFormats == rhs.pViewFormats); # endif } bool 
operator!=(ImageFormatListCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; const void *pNext = {}; uint32_t viewFormatCount = {}; const VULKAN_HPP_NAMESPACE::Format *pViewFormats = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo) == sizeof(VkImageFormatListCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageFormatListCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageFormatListCreateInfo; }; using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; struct ImageFormatProperties2 { using NativeType = VkImageFormatProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageFormatProperties(imageFormatProperties_) { } VULKAN_HPP_CONSTEXPR ImageFormatProperties2(ImageFormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatProperties2(VkImageFormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT : ImageFormatProperties2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageFormatProperties2 &operator=(ImageFormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageFormatProperties2 &operator=(VkImageFormatProperties2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageFormatProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageFormatProperties2 const &) const = default; #else bool operator==(ImageFormatProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageFormatProperties == rhs.imageFormatProperties); # endif } bool operator!=(ImageFormatProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageFormatProperties2) == sizeof(VkImageFormatProperties2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageFormatProperties2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageFormatProperties2; }; using ImageFormatProperties2KHR = ImageFormatProperties2; struct ImageMemoryBarrier { 
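// Editorial usage sketch (illustrative only): a typical layout transition recorded with
// vk::CommandBuffer::pipelineBarrier. `cmd` and `image` are hypothetical handles; the default
// "vk" namespace alias is assumed.
//   vk::ImageMemoryBarrier barrier{ {}, vk::AccessFlagBits::eTransferWrite,
//                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
//                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
//                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } };
//   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
//                        {}, {}, {}, barrier );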
using NativeType = VkImageMemoryBarrier; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcAccessMask(srcAccessMask_), dstAccessMask(dstAccessMask_), oldLayout(oldLayout_), newLayout(newLayout_), srcQueueFamilyIndex(srcQueueFamilyIndex_), dstQueueFamilyIndex(dstQueueFamilyIndex_), image(image_), subresourceRange(subresourceRange_) { } VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(ImageMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageMemoryBarrier(VkImageMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageMemoryBarrier &operator=(ImageMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageMemoryBarrier &operator=(VkImageMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setOldLayout(VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_) VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setNewLayout(VULKAN_HPP_NAMESPACE::ImageLayout newLayout_) VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setSrcQueueFamilyIndex(uint32_t srcQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setDstQueueFamilyIndex(uint32_t dstQueueFamilyIndex_) VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSubresourceRange(VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &subresourceRange_) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, 
pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageMemoryBarrier const &) const = default; #else bool operator==(ImageMemoryBarrier const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcAccessMask == rhs.srcAccessMask) && (dstAccessMask == rhs.dstAccessMask) && (oldLayout == rhs.oldLayout) && (newLayout == rhs.newLayout) && (srcQueueFamilyIndex == rhs.srcQueueFamilyIndex) && (dstQueueFamilyIndex == rhs.dstQueueFamilyIndex) && (image == rhs.image) && (subresourceRange == rhs.subresourceRange); # endif } bool operator!=(ImageMemoryBarrier const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageMemoryBarrier) == sizeof(VkImageMemoryBarrier), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageMemoryBarrier is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageMemoryBarrier; }; struct ImageMemoryRequirementsInfo2 { using NativeType = VkImageMemoryRequirementsInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), image(image_) { } VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(ImageMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageMemoryRequirementsInfo2(VkImageMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : ImageMemoryRequirementsInfo2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageMemoryRequirementsInfo2 &operator=(ImageMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageMemoryRequirementsInfo2 &operator=(VkImageMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { 
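// Editorial note (illustrative only): ImageMemoryRequirementsInfo2 is usually passed straight to
// vk::Device::getImageMemoryRequirements2; with hypothetical `device` and `image` handles:
//   vk::MemoryRequirements2 reqs = device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2{ image } );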
return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, image); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageMemoryRequirementsInfo2 const &) const = default; #else bool operator==(ImageMemoryRequirementsInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (image == rhs.image); # endif } bool operator!=(ImageMemoryRequirementsInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2) == sizeof(VkImageMemoryRequirementsInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageMemoryRequirementsInfo2; }; using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; #if defined(VK_USE_PLATFORM_FUCHSIA) struct ImagePipeSurfaceCreateInfoFUCHSIA { using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), imagePipeHandle(imagePipeHandle_) { } VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(ImagePipeSurfaceCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImagePipeSurfaceCreateInfoFUCHSIA(VkImagePipeSurfaceCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : ImagePipeSurfaceCreateInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImagePipeSurfaceCreateInfoFUCHSIA &operator=(ImagePipeSurfaceCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImagePipeSurfaceCreateInfoFUCHSIA &operator=(VkImagePipeSurfaceCreateInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &setFlags(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &setImagePipeHandle(zx_handle_t imagePipeHandle_) VULKAN_HPP_NOEXCEPT { imagePipeHandle = imagePipeHandle_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, imagePipeHandle); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(ImagePipeSurfaceCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = memcmp(&imagePipeHandle, &rhs.imagePipeHandle, sizeof(zx_handle_t)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(ImagePipeSurfaceCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (memcmp(&imagePipeHandle, &rhs.imagePipeHandle, sizeof(zx_handle_t)) == 0); } bool operator!=(ImagePipeSurfaceCreateInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; zx_handle_t imagePipeHandle = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA) == sizeof(VkImagePipeSurfaceCreateInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImagePipeSurfaceCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct ImagePlaneMemoryRequirementsInfo { using NativeType = VkImagePlaneMemoryRequirementsInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), planeAspect(planeAspect_) { } VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(ImagePlaneMemoryRequirementsInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImagePlaneMemoryRequirementsInfo(VkImagePlaneMemoryRequirementsInfo const &rhs) VULKAN_HPP_NOEXCEPT : ImagePlaneMemoryRequirementsInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImagePlaneMemoryRequirementsInfo &operator=(ImagePlaneMemoryRequirementsInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImagePlaneMemoryRequirementsInfo &operator=(VkImagePlaneMemoryRequirementsInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo &setPlaneAspect(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_) VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImagePlaneMemoryRequirementsInfo const &() 
const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, planeAspect); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImagePlaneMemoryRequirementsInfo const &) const = default; #else bool operator==(ImagePlaneMemoryRequirementsInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (planeAspect == rhs.planeAspect); # endif } bool operator!=(ImagePlaneMemoryRequirementsInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo) == sizeof(VkImagePlaneMemoryRequirementsInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImagePlaneMemoryRequirementsInfo; }; using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; struct ImageResolve { using NativeType = VkImageResolve; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT : srcSubresource(srcSubresource_), srcOffset(srcOffset_), dstSubresource(dstSubresource_), dstOffset(dstOffset_), extent(extent_) { } VULKAN_HPP_CONSTEXPR ImageResolve(ImageResolve const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageResolve(VkImageResolve const &rhs) VULKAN_HPP_NOEXCEPT : ImageResolve(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageResolve &operator=(ImageResolve const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageResolve &operator=(VkImageResolve const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageResolve &setSrcSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &srcSubresource_) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve &setSrcOffset(VULKAN_HPP_NAMESPACE::Offset3D const &srcOffset_) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve &setDstSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &dstSubresource_) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve &setDstOffset(VULKAN_HPP_NAMESPACE::Offset3D const &dstOffset_) VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve &setExtent(VULKAN_HPP_NAMESPACE::Extent3D 
const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageResolve &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(srcSubresource, srcOffset, dstSubresource, dstOffset, extent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageResolve const &) const = default; #else bool operator==(ImageResolve const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (srcSubresource == rhs.srcSubresource) && (srcOffset == rhs.srcOffset) && (dstSubresource == rhs.dstSubresource) && (dstOffset == rhs.dstOffset) && (extent == rhs.extent); # endif } bool operator!=(ImageResolve const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageResolve) == sizeof(VkImageResolve), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageResolve is not nothrow_move_constructible!"); struct ImageResolve2 { using NativeType = VkImageResolve2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageResolve2(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcSubresource(srcSubresource_), srcOffset(srcOffset_), dstSubresource(dstSubresource_), dstOffset(dstOffset_), extent(extent_) { } VULKAN_HPP_CONSTEXPR ImageResolve2(ImageResolve2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageResolve2(VkImageResolve2 const &rhs) VULKAN_HPP_NOEXCEPT : ImageResolve2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageResolve2 &operator=(ImageResolve2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageResolve2 &operator=(VkImageResolve2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &setSrcSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &srcSubresource_) VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &setSrcOffset(VULKAN_HPP_NAMESPACE::Offset3D const &srcOffset_) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 
&setDstSubresource(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &dstSubresource_) VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &setDstOffset(VULKAN_HPP_NAMESPACE::Offset3D const &dstOffset_) VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 &setExtent(VULKAN_HPP_NAMESPACE::Extent3D const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageResolve2 const &) const = default; #else bool operator==(ImageResolve2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcSubresource == rhs.srcSubresource) && (srcOffset == rhs.srcOffset) && (dstSubresource == rhs.dstSubresource) && (dstOffset == rhs.dstOffset) && (extent == rhs.extent); # endif } bool operator!=(ImageResolve2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageResolve2) == sizeof(VkImageResolve2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageResolve2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageResolve2; }; using ImageResolve2KHR = ImageResolve2; struct ImageSparseMemoryRequirementsInfo2 { using NativeType = VkImageSparseMemoryRequirementsInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), image(image_) { } VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(ImageSparseMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSparseMemoryRequirementsInfo2(VkImageSparseMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : ImageSparseMemoryRequirementsInfo2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSparseMemoryRequirementsInfo2 &operator=(ImageSparseMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSparseMemoryRequirementsInfo2 &operator=(VkImageSparseMemoryRequirementsInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, image); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageSparseMemoryRequirementsInfo2 const &) const = default; #else bool operator==(ImageSparseMemoryRequirementsInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (image == rhs.image); # endif } bool operator!=(ImageSparseMemoryRequirementsInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2) == sizeof(VkImageSparseMemoryRequirementsInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageSparseMemoryRequirementsInfo2; }; using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; struct ImageStencilUsageCreateInfo { using NativeType = VkImageStencilUsageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stencilUsage(stencilUsage_) { } VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(ImageStencilUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageStencilUsageCreateInfo(VkImageStencilUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ImageStencilUsageCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageStencilUsageCreateInfo &operator=(ImageStencilUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageStencilUsageCreateInfo &operator=(VkImageStencilUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo &setStencilUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_) VULKAN_HPP_NOEXCEPT { stencilUsage = stencilUsage_; return *this; } #endif 
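// Editorial usage sketch (illustrative only): ImageStencilUsageCreateInfo is chained into
// VkImageCreateInfo::pNext to give the stencil aspect a usage distinct from the rest of the image
// (VK_EXT_separate_stencil_usage / Vulkan 1.2). With a hypothetical vk::ImageCreateInfo `depthImageInfo`:
//   vk::ImageStencilUsageCreateInfo stencilUsage{ vk::ImageUsageFlagBits::eDepthStencilAttachment };
//   depthImageInfo.setPNext( &stencilUsage );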
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stencilUsage); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageStencilUsageCreateInfo const &) const = default; #else bool operator==(ImageStencilUsageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stencilUsage == rhs.stencilUsage); # endif } bool operator!=(ImageStencilUsageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo) == sizeof(VkImageStencilUsageCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageStencilUsageCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageStencilUsageCreateInfo; }; using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; struct ImageSwapchainCreateInfoKHR { using NativeType = VkImageSwapchainCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchain(swapchain_) { } VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(ImageSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSwapchainCreateInfoKHR(VkImageSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImageSwapchainCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSwapchainCreateInfoKHR &operator=(ImageSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageSwapchainCreateInfoKHR &operator=(VkImageSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR &setSwapchain(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_) VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, 
swapchain); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageSwapchainCreateInfoKHR const &) const = default; #else bool operator==(ImageSwapchainCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchain == rhs.swapchain); # endif } bool operator!=(ImageSwapchainCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR) == sizeof(VkImageSwapchainCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageSwapchainCreateInfoKHR; }; struct ImageViewASTCDecodeModeEXT { using NativeType = VkImageViewASTCDecodeModeEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), decodeMode(decodeMode_) { } VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(ImageViewASTCDecodeModeEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewASTCDecodeModeEXT(VkImageViewASTCDecodeModeEXT const &rhs) VULKAN_HPP_NOEXCEPT : ImageViewASTCDecodeModeEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageViewASTCDecodeModeEXT &operator=(ImageViewASTCDecodeModeEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewASTCDecodeModeEXT &operator=(VkImageViewASTCDecodeModeEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT &setDecodeMode(VULKAN_HPP_NAMESPACE::Format decodeMode_) VULKAN_HPP_NOEXCEPT { decodeMode = decodeMode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, decodeMode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageViewASTCDecodeModeEXT const &) const = default; #else bool operator==(ImageViewASTCDecodeModeEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (decodeMode == rhs.decodeMode); # endif } bool operator!=(ImageViewASTCDecodeModeEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eImageViewAstcDecodeModeEXT;
    const void *                        pNext      = {};
    VULKAN_HPP_NAMESPACE::Format        decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT) == sizeof(VkImageViewASTCDecodeModeEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value,
                           "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
  {
    using Type = ImageViewASTCDecodeModeEXT;
  };

  struct ImageViewAddressPropertiesNVX
  {
    using NativeType = VkImageViewAddressPropertiesNVX;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
                                                       VULKAN_HPP_NAMESPACE::DeviceSize    size_          = {},
                                                       void *                              pNext_         = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), deviceAddress(deviceAddress_), size(size_)
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(ImageViewAddressPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT = default;

    ImageViewAddressPropertiesNVX(VkImageViewAddressPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT
      : ImageViewAddressPropertiesNVX(*reinterpret_cast<ImageViewAddressPropertiesNVX const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImageViewAddressPropertiesNVX &operator=(ImageViewAddressPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT = default;

    ImageViewAddressPropertiesNVX &operator=(VkImageViewAddressPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>(&rhs);
      return *this;
    }

    explicit operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewAddressPropertiesNVX *>(this);
    }

    explicit operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewAddressPropertiesNVX *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, deviceAddress, size);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(ImageViewAddressPropertiesNVX const &) const = default;
#else
    bool operator==(ImageViewAddressPropertiesNVX const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceAddress == rhs.deviceAddress) && (size == rhs.size);
# endif
    }

    bool operator!=(ImageViewAddressPropertiesNVX const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eImageViewAddressPropertiesNVX;
    void *                              pNext         = {};
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    size          = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX) == sizeof(VkImageViewAddressPropertiesNVX),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value,
                           "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
  {
    using Type = ImageViewAddressPropertiesNVX;
  };

  struct
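// Editorial usage sketch for the ImageViewCreateInfo wrapper that follows (illustrative only;
// `device` and `image` are hypothetical, the default "vk" namespace alias is assumed):
//   vk::ImageViewCreateInfo viewInfo{ {}, image, vk::ImageViewType::e2D, vk::Format::eR8G8B8A8Unorm,
//                                     {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } };
//   vk::ImageView view = device.createImageView( viewInfo );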
ImageViewCreateInfo { using NativeType = VkImageViewCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), image(image_), viewType(viewType_), format(format_), components(components_), subresourceRange(subresourceRange_) { } VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(ImageViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewCreateInfo(VkImageViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageViewCreateInfo &operator=(ImageViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewCreateInfo &operator=(VkImageViewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &setViewType(VULKAN_HPP_NAMESPACE::ImageViewType viewType_) VULKAN_HPP_NOEXCEPT { viewType = viewType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &setComponents(VULKAN_HPP_NAMESPACE::ComponentMapping const &components_) VULKAN_HPP_NOEXCEPT { components = components_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange(VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &subresourceRange_) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, image, viewType, format, components, subresourceRange); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageViewCreateInfo const &) const = default; #else bool operator==(ImageViewCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (image == rhs.image) && (viewType == rhs.viewType) && (format == rhs.format) && (components == rhs.components) && 
(subresourceRange == rhs.subresourceRange); # endif } bool operator!=(ImageViewCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageViewCreateInfo) == sizeof(VkImageViewCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageViewCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageViewCreateInfo; }; struct ImageViewHandleInfoNVX { using NativeType = VkImageViewHandleInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageView(imageView_), descriptorType(descriptorType_), sampler(sampler_) { } VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(ImageViewHandleInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewHandleInfoNVX(VkImageViewHandleInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT : ImageViewHandleInfoNVX(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageViewHandleInfoNVX &operator=(ImageViewHandleInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewHandleInfoNVX &operator=(VkImageViewHandleInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &setImageView(VULKAN_HPP_NAMESPACE::ImageView imageView_) VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &setDescriptorType(VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &setSampler(VULKAN_HPP_NAMESPACE::Sampler sampler_) VULKAN_HPP_NOEXCEPT { sampler = sampler_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageView, descriptorType, sampler); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageViewHandleInfoNVX 
const &) const = default; #else bool operator==(ImageViewHandleInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageView == rhs.imageView) && (descriptorType == rhs.descriptorType) && (sampler == rhs.sampler); # endif } bool operator!=(ImageViewHandleInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; VULKAN_HPP_NAMESPACE::Sampler sampler = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX) == sizeof(VkImageViewHandleInfoNVX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImageViewHandleInfoNVX is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImageViewHandleInfoNVX; }; struct ImageViewMinLodCreateInfoEXT { using NativeType = VkImageViewMinLodCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT(float minLod_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minLod(minLod_) { } VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT(ImageViewMinLodCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewMinLodCreateInfoEXT(VkImageViewMinLodCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ImageViewMinLodCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageViewMinLodCreateInfoEXT &operator=(ImageViewMinLodCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImageViewMinLodCreateInfoEXT &operator=(VkImageViewMinLodCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT &setMinLod(float minLod_) VULKAN_HPP_NOEXCEPT { minLod = minLod_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minLod); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImageViewMinLodCreateInfoEXT const &) const = default; #else bool operator==(ImageViewMinLodCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minLod == rhs.minLod); # endif } bool operator!=(ImageViewMinLodCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif 
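// Editorial usage sketch (illustrative only): clamp the minimum LOD of a view by chaining this
// structure into vk::ImageViewCreateInfo::pNext (requires VK_EXT_image_view_min_lod and the
// corresponding feature). With a hypothetical vk::ImageViewCreateInfo `viewInfo`:
//   vk::ImageViewMinLodCreateInfoEXT minLodInfo{ 2.0f };
//   viewInfo.setPNext( &minLodInfo );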
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eImageViewMinLodCreateInfoEXT;
    const void *                        pNext  = {};
    float                               minLod = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT) == sizeof(VkImageViewMinLodCreateInfoEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value,
                           "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eImageViewMinLodCreateInfoEXT>
  {
    using Type = ImageViewMinLodCreateInfoEXT;
  };

  struct ImageViewUsageCreateInfo
  {
    using NativeType = VkImageViewUsageCreateInfo;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), usage(usage_)
    {
    }

    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(ImageViewUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    ImageViewUsageCreateInfo(VkImageViewUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT
      : ImageViewUsageCreateInfo(*reinterpret_cast<ImageViewUsageCreateInfo const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ImageViewUsageCreateInfo &operator=(ImageViewUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    ImageViewUsageCreateInfo &operator=(VkImageViewUsageCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo &setUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewUsageCreateInfo *>(this);
    }

    explicit operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewUsageCreateInfo *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, usage);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(ImageViewUsageCreateInfo const &) const = default;
#else
    bool operator==(ImageViewUsageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (usage == rhs.usage);
# endif
    }

    bool operator!=(ImageViewUsageCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType = StructureType::eImageViewUsageCreateInfo;
    const void *                          pNext = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo) == sizeof(VkImageViewUsageCreateInfo),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value,
                           "ImageViewUsageCreateInfo is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
  {
    using Type = ImageViewUsageCreateInfo;
  };

  using
ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct ImportAndroidHardwareBufferInfoANDROID { using NativeType = VkImportAndroidHardwareBufferInfoANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer *buffer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), buffer(buffer_) { } VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(ImportAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportAndroidHardwareBufferInfoANDROID(VkImportAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT : ImportAndroidHardwareBufferInfoANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportAndroidHardwareBufferInfoANDROID &operator=(ImportAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportAndroidHardwareBufferInfoANDROID &operator=(VkImportAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID &setBuffer(struct AHardwareBuffer *buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, buffer); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportAndroidHardwareBufferInfoANDROID const &) const = default; # else bool operator==(ImportAndroidHardwareBufferInfoANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (buffer == rhs.buffer); # endif } bool operator!=(ImportAndroidHardwareBufferInfoANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; const void *pNext = {}; struct AHardwareBuffer *buffer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID) == sizeof(VkImportAndroidHardwareBufferInfoANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportAndroidHardwareBufferInfoANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct ImportFenceFdInfoKHR { using NativeType = VkImportFenceFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
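// Editorial usage sketch (illustrative only): import a POSIX sync fd as a temporary fence payload
// via vk::Device::importFenceFdKHR (requires VK_KHR_external_fence_fd). `device`, `fence` and `fd`
// are hypothetical; the default "vk" namespace alias is assumed.
//   vk::ImportFenceFdInfoKHR importInfo{ fence, vk::FenceImportFlagBits::eTemporary,
//                                        vk::ExternalFenceHandleTypeFlagBits::eSyncFd, fd };
//   device.importFenceFdKHR( importInfo );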
structureType = StructureType::eImportFenceFdInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fence(fence_), flags(flags_), handleType(handleType_), fd(fd_) { } VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(ImportFenceFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportFenceFdInfoKHR(VkImportFenceFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImportFenceFdInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportFenceFdInfoKHR &operator=(ImportFenceFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportFenceFdInfoKHR &operator=(VkImportFenceFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &setFence(VULKAN_HPP_NAMESPACE::Fence fence_) VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::FenceImportFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &setHandleType(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &setFd(int fd_) VULKAN_HPP_NOEXCEPT { fd = fd_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fence, flags, handleType, fd); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportFenceFdInfoKHR const &) const = default; #else bool operator==(ImportFenceFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fence == rhs.fence) && (flags == rhs.flags) && (handleType == rhs.handleType) && (fd == rhs.fd); # endif } bool operator!=(ImportFenceFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Fence fence = {}; VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR) == sizeof(VkImportFenceFdInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportFenceFdInfoKHR is not 
nothrow_move_constructible!"); template<> struct CppType { using Type = ImportFenceFdInfoKHR; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ImportFenceWin32HandleInfoKHR { using NativeType = VkImportFenceWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fence(fence_), flags(flags_), handleType(handleType_), handle(handle_), name(name_) { } VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(ImportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportFenceWin32HandleInfoKHR(VkImportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImportFenceWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportFenceWin32HandleInfoKHR &operator=(ImportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportFenceWin32HandleInfoKHR &operator=(VkImportFenceWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &setFence(VULKAN_HPP_NAMESPACE::Fence fence_) VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::FenceImportFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandleType(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &setHandle(HANDLE handle_) VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &setName(LPCWSTR name_) VULKAN_HPP_NOEXCEPT { name = name_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fence, flags, handleType, handle, name); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportFenceWin32HandleInfoKHR const &) const = default; # else bool operator==(ImportFenceWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fence == rhs.fence) && (flags == rhs.flags) && (handleType == rhs.handleType) && (handle == rhs.handle) && (name == rhs.name); # endif } bool operator!=(ImportFenceWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Fence fence = {}; VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR) == sizeof(VkImportFenceWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportFenceWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct ImportMemoryBufferCollectionFUCHSIA { using NativeType = VkImportMemoryBufferCollectionFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), collection(collection_), index(index_) { } VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA(ImportMemoryBufferCollectionFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryBufferCollectionFUCHSIA(VkImportMemoryBufferCollectionFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : ImportMemoryBufferCollectionFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportMemoryBufferCollectionFUCHSIA &operator=(ImportMemoryBufferCollectionFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryBufferCollectionFUCHSIA &operator=(VkImportMemoryBufferCollectionFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA &setCollection(VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_) VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA &setIndex(uint32_t index_) VULKAN_HPP_NOEXCEPT { index = index_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, collection, index); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportMemoryBufferCollectionFUCHSIA const &) const = default; # else bool operator==(ImportMemoryBufferCollectionFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext 
== rhs.pNext) && (collection == rhs.collection) && (index == rhs.index); # endif } bool operator!=(ImportMemoryBufferCollectionFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; uint32_t index = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA) == sizeof(VkImportMemoryBufferCollectionFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportMemoryBufferCollectionFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct ImportMemoryFdInfoKHR { using NativeType = VkImportMemoryFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_), fd(fd_) { } VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(ImportMemoryFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryFdInfoKHR(VkImportMemoryFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImportMemoryFdInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportMemoryFdInfoKHR &operator=(ImportMemoryFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryFdInfoKHR &operator=(VkImportMemoryFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR &setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR &setFd(int fd_) VULKAN_HPP_NOEXCEPT { fd = fd_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType, fd); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportMemoryFdInfoKHR const &) const = default; #else bool operator==(ImportMemoryFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType) && (fd == rhs.fd); # endif } bool operator!=(ImportMemoryFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eImportMemoryFdInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR) == sizeof(VkImportMemoryFdInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportMemoryFdInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportMemoryFdInfoKHR; }; struct ImportMemoryHostPointerInfoEXT { using NativeType = VkImportMemoryHostPointerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void *pHostPointer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_), pHostPointer(pHostPointer_) { } VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(ImportMemoryHostPointerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryHostPointerInfoEXT(VkImportMemoryHostPointerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ImportMemoryHostPointerInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportMemoryHostPointerInfoEXT &operator=(ImportMemoryHostPointerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryHostPointerInfoEXT &operator=(VkImportMemoryHostPointerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT &setPHostPointer(void *pHostPointer_) VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType, pHostPointer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportMemoryHostPointerInfoEXT const &) const = default; #else bool operator==(ImportMemoryHostPointerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType) && (pHostPointer == rhs.pHostPointer); # endif } bool operator!=(ImportMemoryHostPointerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eImportMemoryHostPointerInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; void *pHostPointer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT) == sizeof(VkImportMemoryHostPointerInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportMemoryHostPointerInfoEXT; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ImportMemoryWin32HandleInfoKHR { using NativeType = VkImportMemoryWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_), handle(handle_), name(name_) { } VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(ImportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryWin32HandleInfoKHR(VkImportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImportMemoryWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportMemoryWin32HandleInfoKHR &operator=(ImportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryWin32HandleInfoKHR &operator=(VkImportMemoryWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR &setHandle(HANDLE handle_) VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR &setName(LPCWSTR name_) VULKAN_HPP_NOEXCEPT { name = name_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType, handle, name); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportMemoryWin32HandleInfoKHR const &) const = default; # else bool operator==(ImportMemoryWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType) && 
(handle == rhs.handle) && (name == rhs.name); # endif } bool operator!=(ImportMemoryWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR) == sizeof(VkImportMemoryWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportMemoryWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ImportMemoryWin32HandleInfoNV { using NativeType = VkImportMemoryWin32HandleInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_), handle(handle_) { } VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(ImportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryWin32HandleInfoNV(VkImportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : ImportMemoryWin32HandleInfoNV(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportMemoryWin32HandleInfoNV &operator=(ImportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryWin32HandleInfoNV &operator=(VkImportMemoryWin32HandleInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV &setHandle(HANDLE handle_) VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType, handle); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportMemoryWin32HandleInfoNV const &) const = default; # else bool operator==(ImportMemoryWin32HandleInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType 
== rhs.handleType) && (handle == rhs.handle); # endif } bool operator!=(ImportMemoryWin32HandleInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; HANDLE handle = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV) == sizeof(VkImportMemoryWin32HandleInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportMemoryWin32HandleInfoNV; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct ImportMemoryZirconHandleInfoFUCHSIA { using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, zx_handle_t handle_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_), handle(handle_) { } VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA(ImportMemoryZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryZirconHandleInfoFUCHSIA(VkImportMemoryZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : ImportMemoryZirconHandleInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportMemoryZirconHandleInfoFUCHSIA &operator=(ImportMemoryZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportMemoryZirconHandleInfoFUCHSIA &operator=(VkImportMemoryZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA &setHandle(zx_handle_t handle_) VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType, handle); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(ImportMemoryZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; 
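      // The remaining members are ordered the same way: handleType through its own
      // three-way comparison, then the zx_handle_t compared byte-wise with memcmp and
      // mapped onto std::strong_ordering (less / greater / equivalent).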
if(auto cmp = handleType <=> rhs.handleType; cmp != 0) return cmp; if(auto cmp = memcmp(&handle, &rhs.handle, sizeof(zx_handle_t)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(ImportMemoryZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType) && (memcmp(&handle, &rhs.handle, sizeof(zx_handle_t)) == 0); } bool operator!=(ImportMemoryZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; zx_handle_t handle = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA) == sizeof(VkImportMemoryZirconHandleInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportMemoryZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct ImportSemaphoreFdInfoKHR { using NativeType = VkImportSemaphoreFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), flags(flags_), handleType(handleType_), fd(fd_) { } VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(ImportSemaphoreFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreFdInfoKHR(VkImportSemaphoreFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImportSemaphoreFdInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportSemaphoreFdInfoKHR &operator=(ImportSemaphoreFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreFdInfoKHR &operator=(VkImportSemaphoreFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &setFd(int fd_) VULKAN_HPP_NOEXCEPT { fd = fd_; 
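      // Like the other generated setters, setFd() stores the value and falls through to the
      // shared "return *this;" below, so the struct can be filled in a chained, builder-like
      // style, e.g. (hypothetical sem/fd handles):
      //   info.setSemaphore(sem).setHandleType(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd).setFd(fd);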
return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, flags, handleType, fd); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportSemaphoreFdInfoKHR const &) const = default; #else bool operator==(ImportSemaphoreFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (flags == rhs.flags) && (handleType == rhs.handleType) && (fd == rhs.fd); # endif } bool operator!=(ImportSemaphoreFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR) == sizeof(VkImportSemaphoreFdInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportSemaphoreFdInfoKHR; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct ImportSemaphoreWin32HandleInfoKHR { using NativeType = VkImportSemaphoreWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), flags(flags_), handleType(handleType_), handle(handle_), name(name_) { } VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(ImportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreWin32HandleInfoKHR(VkImportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : ImportSemaphoreWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportSemaphoreWin32HandleInfoKHR &operator=(ImportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreWin32HandleInfoKHR &operator=(VkImportSemaphoreWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext 
= pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &setHandle(HANDLE handle_) VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &setName(LPCWSTR name_) VULKAN_HPP_NOEXCEPT { name = name_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, flags, handleType, handle, name); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ImportSemaphoreWin32HandleInfoKHR const &) const = default; # else bool operator==(ImportSemaphoreWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (flags == rhs.flags) && (handleType == rhs.handleType) && (handle == rhs.handle) && (name == rhs.name); # endif } bool operator!=(ImportSemaphoreWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR) == sizeof(VkImportSemaphoreWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportSemaphoreWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct ImportSemaphoreZirconHandleInfoFUCHSIA { using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, zx_handle_t zirconHandle_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), flags(flags_), handleType(handleType_), zirconHandle(zirconHandle_) { } VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA(ImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreZirconHandleInfoFUCHSIA(VkImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : ImportSemaphoreZirconHandleInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImportSemaphoreZirconHandleInfoFUCHSIA &operator=(ImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreZirconHandleInfoFUCHSIA &operator=(VkImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &setFlags(VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &setZirconHandle(zx_handle_t zirconHandle_) VULKAN_HPP_NOEXCEPT { zirconHandle = zirconHandle_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, flags, handleType, zirconHandle); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(ImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = semaphore <=> rhs.semaphore; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = handleType <=> rhs.handleType; cmp != 0) return cmp; if(auto cmp = memcmp(&zirconHandle, &rhs.zirconHandle, sizeof(zx_handle_t)); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(ImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (flags == rhs.flags) && (handleType == rhs.handleType) && (memcmp(&zirconHandle, &rhs.zirconHandle, sizeof(zx_handle_t)) == 0); } bool operator!=(ImportSemaphoreZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; zx_handle_t zirconHandle = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA) == sizeof(VkImportSemaphoreZirconHandleInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct IndirectCommandsLayoutTokenNV { using NativeType = VkIndirectCommandsLayoutTokenNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType *pIndexTypes_ = {}, const uint32_t *pIndexTypeValues_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), tokenType(tokenType_), stream(stream_), offset(offset_), vertexBindingUnit(vertexBindingUnit_), vertexDynamicStride(vertexDynamicStride_), pushconstantPipelineLayout(pushconstantPipelineLayout_), pushconstantShaderStageFlags(pushconstantShaderStageFlags_), pushconstantOffset(pushconstantOffset_), pushconstantSize(pushconstantSize_), indirectStateFlags(indirectStateFlags_), indexTypeCount(indexTypeCount_), pIndexTypes(pIndexTypes_), pIndexTypeValues(pIndexTypeValues_) { } VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(IndirectCommandsLayoutTokenNV const &rhs) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutTokenNV(VkIndirectCommandsLayoutTokenNV const &rhs) VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutTokenNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t 
vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &indexTypeValues_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , tokenType(tokenType_) , stream(stream_) , offset(offset_) , vertexBindingUnit(vertexBindingUnit_) , vertexDynamicStride(vertexDynamicStride_) , pushconstantPipelineLayout(pushconstantPipelineLayout_) , pushconstantShaderStageFlags(pushconstantShaderStageFlags_) , pushconstantOffset(pushconstantOffset_) , pushconstantSize(pushconstantSize_) , indirectStateFlags(indirectStateFlags_) , indexTypeCount(static_cast(indexTypes_.size())) , pIndexTypes(indexTypes_.data()) , pIndexTypeValues(indexTypeValues_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(indexTypes_.size() == indexTypeValues_.size()); # else if(indexTypes_.size() != indexTypeValues_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ IndirectCommandsLayoutTokenNV &operator=(IndirectCommandsLayoutTokenNV const &rhs) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutTokenNV &operator=(VkIndirectCommandsLayoutTokenNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setTokenType(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_) VULKAN_HPP_NOEXCEPT { tokenType = tokenType_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setStream(uint32_t stream_) VULKAN_HPP_NOEXCEPT { stream = stream_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setOffset(uint32_t offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setVertexBindingUnit(uint32_t vertexBindingUnit_) VULKAN_HPP_NOEXCEPT { vertexBindingUnit = vertexBindingUnit_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setVertexDynamicStride(VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_) VULKAN_HPP_NOEXCEPT { vertexDynamicStride = vertexDynamicStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_) VULKAN_HPP_NOEXCEPT { pushconstantPipelineLayout = pushconstantPipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags(VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_) VULKAN_HPP_NOEXCEPT { pushconstantShaderStageFlags = pushconstantShaderStageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setPushconstantOffset(uint32_t pushconstantOffset_) VULKAN_HPP_NOEXCEPT { pushconstantOffset = pushconstantOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV 
&setPushconstantSize(uint32_t pushconstantSize_) VULKAN_HPP_NOEXCEPT { pushconstantSize = pushconstantSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndirectStateFlags(VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_) VULKAN_HPP_NOEXCEPT { indirectStateFlags = indirectStateFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setIndexTypeCount(uint32_t indexTypeCount_) VULKAN_HPP_NOEXCEPT { indexTypeCount = indexTypeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setPIndexTypes(const VULKAN_HPP_NAMESPACE::IndexType *pIndexTypes_) VULKAN_HPP_NOEXCEPT { pIndexTypes = pIndexTypes_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) IndirectCommandsLayoutTokenNV & setIndexTypes(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &indexTypes_) VULKAN_HPP_NOEXCEPT { indexTypeCount = static_cast(indexTypes_.size()); pIndexTypes = indexTypes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &setPIndexTypeValues(const uint32_t *pIndexTypeValues_) VULKAN_HPP_NOEXCEPT { pIndexTypeValues = pIndexTypeValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) IndirectCommandsLayoutTokenNV & setIndexTypeValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &indexTypeValues_) VULKAN_HPP_NOEXCEPT { indexTypeCount = static_cast(indexTypeValues_.size()); pIndexTypeValues = indexTypeValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, tokenType, stream, offset, vertexBindingUnit, vertexDynamicStride, pushconstantPipelineLayout, pushconstantShaderStageFlags, pushconstantOffset, pushconstantSize, indirectStateFlags, indexTypeCount, pIndexTypes, pIndexTypeValues); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(IndirectCommandsLayoutTokenNV const &) const = default; #else bool operator==(IndirectCommandsLayoutTokenNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (tokenType == rhs.tokenType) && (stream == rhs.stream) && (offset == rhs.offset) && (vertexBindingUnit == rhs.vertexBindingUnit) && (vertexDynamicStride == rhs.vertexDynamicStride) && (pushconstantPipelineLayout == rhs.pushconstantPipelineLayout) && (pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags) && (pushconstantOffset == rhs.pushconstantOffset) && (pushconstantSize == rhs.pushconstantSize) && (indirectStateFlags == rhs.indirectStateFlags) && (indexTypeCount == rhs.indexTypeCount) && (pIndexTypes == rhs.pIndexTypes) && (pIndexTypeValues == rhs.pIndexTypeValues); # endif } bool operator!=(IndirectCommandsLayoutTokenNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = 
VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; uint32_t stream = {}; uint32_t offset = {}; uint32_t vertexBindingUnit = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; uint32_t pushconstantOffset = {}; uint32_t pushconstantSize = {}; VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; uint32_t indexTypeCount = {}; const VULKAN_HPP_NAMESPACE::IndexType *pIndexTypes = {}; const uint32_t *pIndexTypeValues = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV) == sizeof(VkIndirectCommandsLayoutTokenNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = IndirectCommandsLayoutTokenNV; }; struct IndirectCommandsLayoutCreateInfoNV { using NativeType = VkIndirectCommandsLayoutCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV *pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t *pStreamStrides_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pipelineBindPoint(pipelineBindPoint_), tokenCount(tokenCount_), pTokens(pTokens_), streamCount(streamCount_), pStreamStrides(pStreamStrides_) { } VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(IndirectCommandsLayoutCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutCreateInfoNV(VkIndirectCommandsLayoutCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &streamStrides_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , pipelineBindPoint(pipelineBindPoint_) , tokenCount(static_cast(tokens_.size())) , pTokens(tokens_.data()) , streamCount(static_cast(streamStrides_.size())) , pStreamStrides(streamStrides_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ IndirectCommandsLayoutCreateInfoNV &operator=(IndirectCommandsLayoutCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutCreateInfoNV &operator=(VkIndirectCommandsLayoutCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
IndirectCommandsLayoutCreateInfoNV &setFlags(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &setTokenCount(uint32_t tokenCount_) VULKAN_HPP_NOEXCEPT { tokenCount = tokenCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV *pTokens_) VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) IndirectCommandsLayoutCreateInfoNV & setTokens(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &tokens_) VULKAN_HPP_NOEXCEPT { tokenCount = static_cast(tokens_.size()); pTokens = tokens_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &setStreamCount(uint32_t streamCount_) VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &setPStreamStrides(const uint32_t *pStreamStrides_) VULKAN_HPP_NOEXCEPT { pStreamStrides = pStreamStrides_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) IndirectCommandsLayoutCreateInfoNV & setStreamStrides(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &streamStrides_) VULKAN_HPP_NOEXCEPT { streamCount = static_cast(streamStrides_.size()); pStreamStrides = streamStrides_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(IndirectCommandsLayoutCreateInfoNV const &) const = default; #else bool operator==(IndirectCommandsLayoutCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pipelineBindPoint == rhs.pipelineBindPoint) && (tokenCount == rhs.tokenCount) && (pTokens == rhs.pTokens) && (streamCount == rhs.streamCount) && (pStreamStrides == rhs.pStreamStrides); # endif } bool operator!=(IndirectCommandsLayoutCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; uint32_t tokenCount = {}; const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV *pTokens = {}; uint32_t streamCount = {}; const uint32_t *pStreamStrides = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV) == 
sizeof(VkIndirectCommandsLayoutCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = IndirectCommandsLayoutCreateInfoNV; }; struct InitializePerformanceApiInfoINTEL { using NativeType = VkInitializePerformanceApiInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void *pUserData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pUserData(pUserData_) { } VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(InitializePerformanceApiInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; InitializePerformanceApiInfoINTEL(VkInitializePerformanceApiInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT : InitializePerformanceApiInfoINTEL(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ InitializePerformanceApiInfoINTEL &operator=(InitializePerformanceApiInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; InitializePerformanceApiInfoINTEL &operator=(VkInitializePerformanceApiInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL &setPUserData(void *pUserData_) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pUserData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(InitializePerformanceApiInfoINTEL const &) const = default; #else bool operator==(InitializePerformanceApiInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pUserData == rhs.pUserData); # endif } bool operator!=(InitializePerformanceApiInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; const void *pNext = {}; void *pUserData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL) == sizeof(VkInitializePerformanceApiInfoINTEL), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!"); template<> struct CppType { using Type = InitializePerformanceApiInfoINTEL; }; struct InputAttachmentAspectReference { using 
NativeType = VkInputAttachmentAspectReference; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT : subpass(subpass_), inputAttachmentIndex(inputAttachmentIndex_), aspectMask(aspectMask_) { } VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(InputAttachmentAspectReference const &rhs) VULKAN_HPP_NOEXCEPT = default; InputAttachmentAspectReference(VkInputAttachmentAspectReference const &rhs) VULKAN_HPP_NOEXCEPT : InputAttachmentAspectReference(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ InputAttachmentAspectReference &operator=(InputAttachmentAspectReference const &rhs) VULKAN_HPP_NOEXCEPT = default; InputAttachmentAspectReference &operator=(VkInputAttachmentAspectReference const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &setSubpass(uint32_t subpass_) VULKAN_HPP_NOEXCEPT { subpass = subpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &setInputAttachmentIndex(uint32_t inputAttachmentIndex_) VULKAN_HPP_NOEXCEPT { inputAttachmentIndex = inputAttachmentIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &setAspectMask(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(subpass, inputAttachmentIndex, aspectMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(InputAttachmentAspectReference const &) const = default; #else bool operator==(InputAttachmentAspectReference const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (subpass == rhs.subpass) && (inputAttachmentIndex == rhs.inputAttachmentIndex) && (aspectMask == rhs.aspectMask); # endif } bool operator!=(InputAttachmentAspectReference const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t subpass = {}; uint32_t inputAttachmentIndex = {}; VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference) == sizeof(VkInputAttachmentAspectReference), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "InputAttachmentAspectReference is not nothrow_move_constructible!"); using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; struct InstanceCreateInfo { using NativeType = VkInstanceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = 
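  // Usage sketch, not part of the generated header: InputAttachmentAspectReference entries are
  // consumed through RenderPassInputAttachmentAspectCreateInfo, which is chained into a
  // RenderPassCreateInfo. Assumes VULKAN_HPP_NAMESPACE is the default `vk`; the subpass and
  // attachment indices below are placeholders.
  //
  //   vk::InputAttachmentAspectReference aspectRef( /*subpass=*/0, /*inputAttachmentIndex=*/0,
  //                                                 vk::ImageAspectFlagBits::eColor );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
  //   // aspectInfo is then hooked into RenderPassCreateInfo::pNext before creating the render pass.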
{}, const VULKAN_HPP_NAMESPACE::ApplicationInfo *pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char *const *ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char *const *ppEnabledExtensionNames_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pApplicationInfo(pApplicationInfo_), enabledLayerCount(enabledLayerCount_), ppEnabledLayerNames(ppEnabledLayerNames_), enabledExtensionCount(enabledExtensionCount_), ppEnabledExtensionNames(ppEnabledExtensionNames_) { } VULKAN_HPP_CONSTEXPR InstanceCreateInfo(InstanceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; InstanceCreateInfo(VkInstanceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo *pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledExtensionNames_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , pApplicationInfo(pApplicationInfo_) , enabledLayerCount(static_cast(pEnabledLayerNames_.size())) , ppEnabledLayerNames(pEnabledLayerNames_.data()) , enabledExtensionCount(static_cast(pEnabledExtensionNames_.size())) , ppEnabledExtensionNames(pEnabledExtensionNames_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ InstanceCreateInfo &operator=(InstanceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; InstanceCreateInfo &operator=(VkInstanceCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setPApplicationInfo(const VULKAN_HPP_NAMESPACE::ApplicationInfo *pApplicationInfo_) VULKAN_HPP_NOEXCEPT { pApplicationInfo = pApplicationInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setEnabledLayerCount(uint32_t enabledLayerCount_) VULKAN_HPP_NOEXCEPT { enabledLayerCount = enabledLayerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setPpEnabledLayerNames(const char *const *ppEnabledLayerNames_) VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) InstanceCreateInfo & setPEnabledLayerNames(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pEnabledLayerNames_) VULKAN_HPP_NOEXCEPT { enabledLayerCount = static_cast(pEnabledLayerNames_.size()); ppEnabledLayerNames = pEnabledLayerNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setEnabledExtensionCount(uint32_t enabledExtensionCount_) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &setPpEnabledExtensionNames(const char *const *ppEnabledExtensionNames_) VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) InstanceCreateInfo & setPEnabledExtensionNames(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const 
&pEnabledExtensionNames_) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = static_cast(pEnabledExtensionNames_.size()); ppEnabledExtensionNames = pEnabledExtensionNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(InstanceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0) return cmp; if(auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0) return cmp; for(size_t i = 0; i < enabledLayerCount; ++i) { if(ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i]) if(auto cmp = strcmp(ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i]); cmp != 0) return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; } if(auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0) return cmp; for(size_t i = 0; i < enabledExtensionCount; ++i) { if(ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i]) if(auto cmp = strcmp(ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i]); cmp != 0) return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; } return std::strong_ordering::equivalent; } #endif bool operator==(InstanceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pApplicationInfo == rhs.pApplicationInfo) && (enabledLayerCount == rhs.enabledLayerCount) && [this, rhs] { bool equal = true; for(size_t i = 0; equal && (i < enabledLayerCount); ++i) { equal = ((ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i]) || (strcmp(ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i]) == 0)); } return equal; }() && (enabledExtensionCount == rhs.enabledExtensionCount) && [this, rhs] { bool equal = true; for(size_t i = 0; equal && (i < enabledExtensionCount); ++i) { equal = ((ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i]) || (strcmp(ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i]) == 0)); } return equal; }(); } bool operator!=(InstanceCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; const VULKAN_HPP_NAMESPACE::ApplicationInfo *pApplicationInfo = {}; uint32_t enabledLayerCount = {}; const char *const *ppEnabledLayerNames = {}; uint32_t enabledExtensionCount = {}; const char *const *ppEnabledExtensionNames = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::InstanceCreateInfo) == sizeof(VkInstanceCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "InstanceCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = InstanceCreateInfo; }; struct LayerProperties { using NativeType = VkLayerProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array const &layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array const &description_ = {}) VULKAN_HPP_NOEXCEPT : layerName(layerName_), specVersion(specVersion_), implementationVersion(implementationVersion_), description(description_) { } VULKAN_HPP_CONSTEXPR_14 LayerProperties(LayerProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; LayerProperties(VkLayerProperties const &rhs) VULKAN_HPP_NOEXCEPT : LayerProperties(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ LayerProperties &operator=(LayerProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; LayerProperties &operator=(VkLayerProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(layerName, specVersion, implementationVersion, description); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(LayerProperties const &) const = default; #else bool operator==(LayerProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() 
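  // Usage sketch, not part of the generated header: with the enhanced-mode constructor above, the
  // layer / extension name arrays are passed as ArrayProxyNoTemporaries and the two counts are
  // derived automatically. Assumes VULKAN_HPP_NAMESPACE is the default `vk` and the default
  // exception-based API; the layer and extension names are illustrative.
  //
  //   vk::ApplicationInfo appInfo( "demo", 1, "no-engine", 1, VK_API_VERSION_1_1 );
  //   std::vector<char const *> layers     = { "VK_LAYER_KHRONOS_validation" };
  //   std::vector<char const *> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
  //   vk::InstanceCreateInfo createInfo( {}, &appInfo, layers, extensions );
  //   vk::Instance instance = vk::createInstance( createInfo );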
== rhs.reflect(); # else return (layerName == rhs.layerName) && (specVersion == rhs.specVersion) && (implementationVersion == rhs.implementationVersion) && (description == rhs.description); # endif } bool operator!=(LayerProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; uint32_t specVersion = {}; uint32_t implementationVersion = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::LayerProperties) == sizeof(VkLayerProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "LayerProperties is not nothrow_move_constructible!"); #if defined(VK_USE_PLATFORM_MACOS_MVK) struct MacOSSurfaceCreateInfoMVK { using NativeType = VkMacOSSurfaceCreateInfoMVK; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void *pView_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pView(pView_) { } VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(MacOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT = default; MacOSSurfaceCreateInfoMVK(VkMacOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT : MacOSSurfaceCreateInfoMVK(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MacOSSurfaceCreateInfoMVK &operator=(MacOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT = default; MacOSSurfaceCreateInfoMVK &operator=(VkMacOSSurfaceCreateInfoMVK const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK &setFlags(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK &setPView(const void *pView_) VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pView); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MacOSSurfaceCreateInfoMVK const &) const = default; # else bool operator==(MacOSSurfaceCreateInfoMVK const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pView == rhs.pView); # endif } bool operator!=(MacOSSurfaceCreateInfoMVK const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; const void *pNext = 
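  // Usage sketch, not part of the generated header: LayerProperties is a pure output structure;
  // the fixed-size char arrays are wrapped in ArrayWrapper1D, so .data() yields a C string.
  // Assumes VULKAN_HPP_NAMESPACE is the default `vk` and the default exception-based API.
  //
  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  //   for ( vk::LayerProperties const & lp : layers )
  //   {
  //     std::cout << lp.layerName.data() << " (spec version " << lp.specVersion << ")\n";
  //   }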
{}; VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; const void *pView = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK) == sizeof(VkMacOSSurfaceCreateInfoMVK), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!"); template<> struct CppType { using Type = MacOSSurfaceCreateInfoMVK; }; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ struct MappedMemoryRange { using NativeType = VkMappedMemoryRange; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_), offset(offset_), size(size_) { } VULKAN_HPP_CONSTEXPR MappedMemoryRange(MappedMemoryRange const &rhs) VULKAN_HPP_NOEXCEPT = default; MappedMemoryRange(VkMappedMemoryRange const &rhs) VULKAN_HPP_NOEXCEPT : MappedMemoryRange(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MappedMemoryRange &operator=(MappedMemoryRange const &rhs) VULKAN_HPP_NOEXCEPT = default; MappedMemoryRange &operator=(VkMappedMemoryRange const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &setOffset(VULKAN_HPP_NAMESPACE::DeviceSize offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory, offset, size); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MappedMemoryRange const &) const = default; #else bool operator==(MappedMemoryRange const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memory == rhs.memory) && (offset == rhs.offset) && (size == rhs.size); # endif } bool operator!=(MappedMemoryRange const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MappedMemoryRange) == 
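  // Usage sketch, not part of the generated header: a MappedMemoryRange names the region of a
  // non-coherent mapping to flush before device reads (or to invalidate before host reads).
  // Assumes VULKAN_HPP_NAMESPACE is the default `vk`, a vk::Device `device` and a mapped
  // vk::DeviceMemory `memory`; real code must respect nonCoherentAtomSize alignment.
  //
  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  //   device.flushMappedMemoryRanges( range );   // the ArrayProxy parameter also accepts a single element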
sizeof(VkMappedMemoryRange), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MappedMemoryRange is not nothrow_move_constructible!"); template<> struct CppType { using Type = MappedMemoryRange; }; struct MemoryAllocateFlagsInfo { using NativeType = VkMemoryAllocateFlagsInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), deviceMask(deviceMask_) { } VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(MemoryAllocateFlagsInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateFlagsInfo(VkMemoryAllocateFlagsInfo const &rhs) VULKAN_HPP_NOEXCEPT : MemoryAllocateFlagsInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryAllocateFlagsInfo &operator=(MemoryAllocateFlagsInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateFlagsInfo &operator=(VkMemoryAllocateFlagsInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo &setFlags(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo &setDeviceMask(uint32_t deviceMask_) VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, deviceMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryAllocateFlagsInfo const &) const = default; #else bool operator==(MemoryAllocateFlagsInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (deviceMask == rhs.deviceMask); # endif } bool operator!=(MemoryAllocateFlagsInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; uint32_t deviceMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo) == sizeof(VkMemoryAllocateFlagsInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryAllocateFlagsInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryAllocateFlagsInfo; }; using 
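  // Usage sketch, not part of the generated header: MemoryAllocateFlagsInfo extends the
  // MemoryAllocateInfo defined just below through its pNext chain, e.g. to request a
  // device-address-capable allocation. Assumes VULKAN_HPP_NAMESPACE is the default `vk`, a
  // vk::Device `device`, and a `memoryTypeIndex` chosen elsewhere; the size is a placeholder.
  //
  //   vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryAllocateFlagsInfo> chain(
  //     vk::MemoryAllocateInfo( 65536, memoryTypeIndex ),
  //     vk::MemoryAllocateFlagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress ) );
  //   vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );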
MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; struct MemoryAllocateInfo { using NativeType = VkMemoryAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), allocationSize(allocationSize_), memoryTypeIndex(memoryTypeIndex_) { } VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(MemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateInfo(VkMemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryAllocateInfo &operator=(MemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateInfo &operator=(VkMemoryAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo &setAllocationSize(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_) VULKAN_HPP_NOEXCEPT { allocationSize = allocationSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo &setMemoryTypeIndex(uint32_t memoryTypeIndex_) VULKAN_HPP_NOEXCEPT { memoryTypeIndex = memoryTypeIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, allocationSize, memoryTypeIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryAllocateInfo const &) const = default; #else bool operator==(MemoryAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (allocationSize == rhs.allocationSize) && (memoryTypeIndex == rhs.memoryTypeIndex); # endif } bool operator!=(MemoryAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; uint32_t memoryTypeIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryAllocateInfo) == sizeof(VkMemoryAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryAllocateInfo; }; struct MemoryBarrier { using NativeType = VkMemoryBarrier; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags 
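  // Usage sketch, not part of the generated header: the usual path is to size the allocation from
  // the buffer's memory requirements and bind it afterwards. Assumes VULKAN_HPP_NAMESPACE is the
  // default `vk`, the default exception-based API, a vk::Device `device`, a vk::Buffer `buffer`,
  // and a compatible `memoryTypeIndex` selected from the physical device's memory properties.
  //
  //   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
  //   vk::MemoryAllocateInfo allocInfo( reqs.size, memoryTypeIndex );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
  //   device.bindBufferMemory( buffer, memory, 0 );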
srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcAccessMask(srcAccessMask_), dstAccessMask(dstAccessMask_) { } VULKAN_HPP_CONSTEXPR MemoryBarrier(MemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryBarrier(VkMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT : MemoryBarrier(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryBarrier &operator=(MemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryBarrier &operator=(VkMemoryBarrier const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryBarrier &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcAccessMask, dstAccessMask); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryBarrier const &) const = default; #else bool operator==(MemoryBarrier const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcAccessMask == rhs.srcAccessMask) && (dstAccessMask == rhs.dstAccessMask); # endif } bool operator!=(MemoryBarrier const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryBarrier) == sizeof(VkMemoryBarrier), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryBarrier is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryBarrier; }; struct MemoryDedicatedAllocateInfo { using NativeType = VkMemoryDedicatedAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), image(image_), buffer(buffer_) { } VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(MemoryDedicatedAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedAllocateInfo(VkMemoryDedicatedAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : 
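  // Usage sketch, not part of the generated header: a global MemoryBarrier is recorded with
  // CommandBuffer::pipelineBarrier. Assumes VULKAN_HPP_NAMESPACE is the default `vk` and a
  // vk::CommandBuffer `cmd` in the recording state; the stage and access masks are illustrative.
  //
  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
  //                        vk::PipelineStageFlagBits::eFragmentShader,
  //                        {},         // dependency flags
  //                        barrier,    // memory barriers (ArrayProxy)
  //                        {},         // buffer memory barriers
  //                        {} );       // image memory barriers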
MemoryDedicatedAllocateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryDedicatedAllocateInfo &operator=(MemoryDedicatedAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedAllocateInfo &operator=(VkMemoryDedicatedAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &setImage(VULKAN_HPP_NAMESPACE::Image image_) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &setBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer_) VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, image, buffer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryDedicatedAllocateInfo const &) const = default; #else bool operator==(MemoryDedicatedAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (image == rhs.image) && (buffer == rhs.buffer); # endif } bool operator!=(MemoryDedicatedAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo) == sizeof(VkMemoryDedicatedAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryDedicatedAllocateInfo; }; using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; struct MemoryDedicatedRequirements { using NativeType = VkMemoryDedicatedRequirements; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), prefersDedicatedAllocation(prefersDedicatedAllocation_), requiresDedicatedAllocation(requiresDedicatedAllocation_) { } VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(MemoryDedicatedRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedRequirements(VkMemoryDedicatedRequirements const &rhs) VULKAN_HPP_NOEXCEPT : MemoryDedicatedRequirements(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ 
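  // Usage sketch, not part of the generated header: MemoryDedicatedRequirements is queried through
  // a structure chain on getBufferMemoryRequirements2, and when a dedicated allocation is preferred
  // the buffer is named in a MemoryDedicatedAllocateInfo chained into the allocation. Assumes
  // VULKAN_HPP_NAMESPACE is the default `vk`, Vulkan 1.1 entry points, the default exception-based
  // API, a vk::Device `device`, a vk::Buffer `buffer`, and a `memoryTypeIndex` chosen elsewhere.
  //
  //   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2,
  //                                                    vk::MemoryDedicatedRequirements>(
  //     vk::BufferMemoryRequirementsInfo2( buffer ) );
  //   if ( chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation )
  //   {
  //     vk::MemoryDedicatedAllocateInfo dedicated( {}, buffer );   // image stays null
  //     vk::MemoryAllocateInfo allocInfo( chain.get<vk::MemoryRequirements2>().memoryRequirements.size,
  //                                       memoryTypeIndex, &dedicated );
  //     vk::DeviceMemory memory = device.allocateMemory( allocInfo );
  //   }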
    MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
    }

    explicit operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
#else
    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
             ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
#  endif
    }

    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::eMemoryDedicatedRequirements;
    void *                              pNext                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        prefersDedicatedAllocation  = {};
    VULKAN_HPP_NAMESPACE::Bool32        requiresDedicatedAllocation = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value,
                            "MemoryDedicatedRequirements is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
  {
    using Type = MemoryDedicatedRequirements;
  };

  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;

  struct MemoryFdPropertiesKHR
  {
    using NativeType = VkMemoryFdPropertiesKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memoryTypeBits( memoryTypeBits_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
    }

    explicit operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryTypeBits );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto
operator<=>(MemoryFdPropertiesKHR const &) const = default; #else bool operator==(MemoryFdPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryTypeBits == rhs.memoryTypeBits); # endif } bool operator!=(MemoryFdPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; void *pNext = {}; uint32_t memoryTypeBits = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR) == sizeof(VkMemoryFdPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryFdPropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryFdPropertiesKHR; }; #if defined(VK_USE_PLATFORM_ANDROID_KHR) struct MemoryGetAndroidHardwareBufferInfoANDROID { using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_) { } VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(MemoryGetAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetAndroidHardwareBufferInfoANDROID(VkMemoryGetAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT : MemoryGetAndroidHardwareBufferInfoANDROID(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryGetAndroidHardwareBufferInfoANDROID &operator=(MemoryGetAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetAndroidHardwareBufferInfoANDROID &operator=(VkMemoryGetAndroidHardwareBufferInfoANDROID const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryGetAndroidHardwareBufferInfoANDROID const &) const = default; # else bool operator==(MemoryGetAndroidHardwareBufferInfoANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
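  // Usage sketch, not part of the generated header: MemoryFdPropertiesKHR is a pure output
  // structure of VK_KHR_external_memory_fd. Assumes VULKAN_HPP_NAMESPACE is the default `vk`, the
  // default exception-based API, a vk::Device `device`, an externally produced dma-buf file
  // descriptor `fd`, and extension entry points reachable through the active dispatcher.
  //
  //   vk::MemoryFdPropertiesKHR fdProps =
  //     device.getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, fd );
  //   // fdProps.memoryTypeBits restricts the memory types usable when importing `fd`.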
(memory == rhs.memory); # endif } bool operator!=(MemoryGetAndroidHardwareBufferInfoANDROID const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID) == sizeof(VkMemoryGetAndroidHardwareBufferInfoANDROID), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryGetAndroidHardwareBufferInfoANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct MemoryGetFdInfoKHR { using NativeType = VkMemoryGetFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(MemoryGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetFdInfoKHR(VkMemoryGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryGetFdInfoKHR &operator=(MemoryGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetFdInfoKHR &operator=(VkMemoryGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryGetFdInfoKHR const &) const = default; #else bool operator==(MemoryGetFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memory == rhs.memory) && (handleType == rhs.handleType); # endif } bool operator!=(MemoryGetFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryGetFdInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR) == sizeof(VkMemoryGetFdInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryGetFdInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryGetFdInfoKHR; }; struct MemoryGetRemoteAddressInfoNV { using NativeType = VkMemoryGetRemoteAddressInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV(MemoryGetRemoteAddressInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetRemoteAddressInfoNV(VkMemoryGetRemoteAddressInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : MemoryGetRemoteAddressInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryGetRemoteAddressInfoNV &operator=(MemoryGetRemoteAddressInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetRemoteAddressInfoNV &operator=(VkMemoryGetRemoteAddressInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryGetRemoteAddressInfoNV const &) const = default; #else bool operator==(MemoryGetRemoteAddressInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memory == rhs.memory) && (handleType == rhs.handleType); # endif } bool operator!=(MemoryGetRemoteAddressInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
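  // Usage sketch, not part of the generated header: MemoryGetFdInfoKHR feeds Device::getMemoryFdKHR
  // to export an allocation as a POSIX file descriptor (ownership of the fd passes to the caller).
  // Assumes VULKAN_HPP_NAMESPACE is the default `vk`, the default exception-based API, a vk::Device
  // `device`, a vk::DeviceMemory `memory` created as exportable, and loaded extension entry points.
  //
  //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getMemoryFdKHR( getFdInfo );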
StructureType::eMemoryGetRemoteAddressInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV) == sizeof(VkMemoryGetRemoteAddressInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryGetRemoteAddressInfoNV; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct MemoryGetWin32HandleInfoKHR { using NativeType = VkMemoryGetWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(MemoryGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetWin32HandleInfoKHR(VkMemoryGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : MemoryGetWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryGetWin32HandleInfoKHR &operator=(MemoryGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetWin32HandleInfoKHR &operator=(VkMemoryGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory, handleType); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryGetWin32HandleInfoKHR const &) const = default; # else bool operator==(MemoryGetWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memory == rhs.memory) && (handleType == rhs.handleType); # endif } bool operator!=(MemoryGetWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR) == sizeof(VkMemoryGetWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryGetWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct MemoryGetZirconHandleInfoFUCHSIA { using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memory(memory_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA(MemoryGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetZirconHandleInfoFUCHSIA(VkMemoryGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : MemoryGetZirconHandleInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryGetZirconHandleInfoFUCHSIA &operator=(MemoryGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryGetZirconHandleInfoFUCHSIA &operator=(VkMemoryGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memory, handleType); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryGetZirconHandleInfoFUCHSIA const &) const = default; # else bool operator==(MemoryGetZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memory == rhs.memory) && 
(handleType == rhs.handleType); # endif } bool operator!=(MemoryGetZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA) == sizeof(VkMemoryGetZirconHandleInfoFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryGetZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct MemoryHeap { using NativeType = VkMemoryHeap; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT : size(size_), flags(flags_) { } VULKAN_HPP_CONSTEXPR MemoryHeap(MemoryHeap const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryHeap(VkMemoryHeap const &rhs) VULKAN_HPP_NOEXCEPT : MemoryHeap(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryHeap &operator=(MemoryHeap const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryHeap &operator=(VkMemoryHeap const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(size, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryHeap const &) const = default; #else bool operator==(MemoryHeap const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (size == rhs.size) && (flags == rhs.flags); # endif } bool operator!=(MemoryHeap const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceSize size = {}; VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryHeap) == sizeof(VkMemoryHeap), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryHeap is not nothrow_move_constructible!"); struct MemoryHostPointerPropertiesEXT { using NativeType = VkMemoryHostPointerPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryTypeBits(memoryTypeBits_) { } VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(MemoryHostPointerPropertiesEXT const &rhs) 
VULKAN_HPP_NOEXCEPT = default; MemoryHostPointerPropertiesEXT(VkMemoryHostPointerPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : MemoryHostPointerPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryHostPointerPropertiesEXT &operator=(MemoryHostPointerPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryHostPointerPropertiesEXT &operator=(VkMemoryHostPointerPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryTypeBits); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryHostPointerPropertiesEXT const &) const = default; #else bool operator==(MemoryHostPointerPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryTypeBits == rhs.memoryTypeBits); # endif } bool operator!=(MemoryHostPointerPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; void *pNext = {}; uint32_t memoryTypeBits = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT) == sizeof(VkMemoryHostPointerPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryHostPointerPropertiesEXT; }; struct MemoryOpaqueCaptureAddressAllocateInfo { using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), opaqueCaptureAddress(opaqueCaptureAddress_) { } VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(MemoryOpaqueCaptureAddressAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryOpaqueCaptureAddressAllocateInfo(VkMemoryOpaqueCaptureAddressAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT : MemoryOpaqueCaptureAddressAllocateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryOpaqueCaptureAddressAllocateInfo &operator=(MemoryOpaqueCaptureAddressAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryOpaqueCaptureAddressAllocateInfo &operator=(VkMemoryOpaqueCaptureAddressAllocateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
MemoryOpaqueCaptureAddressAllocateInfo &setOpaqueCaptureAddress(uint64_t opaqueCaptureAddress_) VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, opaqueCaptureAddress); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryOpaqueCaptureAddressAllocateInfo const &) const = default; #else bool operator==(MemoryOpaqueCaptureAddressAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (opaqueCaptureAddress == rhs.opaqueCaptureAddress); # endif } bool operator!=(MemoryOpaqueCaptureAddressAllocateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; const void *pNext = {}; uint64_t opaqueCaptureAddress = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo) == sizeof(VkMemoryOpaqueCaptureAddressAllocateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryOpaqueCaptureAddressAllocateInfo; }; using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; struct MemoryPriorityAllocateInfoEXT { using NativeType = VkMemoryPriorityAllocateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), priority(priority_) { } VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(MemoryPriorityAllocateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryPriorityAllocateInfoEXT(VkMemoryPriorityAllocateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : MemoryPriorityAllocateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryPriorityAllocateInfoEXT &operator=(MemoryPriorityAllocateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryPriorityAllocateInfoEXT &operator=(VkMemoryPriorityAllocateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT &setPriority(float priority_) VULKAN_HPP_NOEXCEPT { priority = priority_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); 
    }

    explicit operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, priority);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(MemoryPriorityAllocateInfoEXT const &) const = default;
#else
    bool operator==(MemoryPriorityAllocateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (priority == rhs.priority);
#  endif
    }

    bool operator!=(MemoryPriorityAllocateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
    const void *pNext = {};
    float priority = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT) == sizeof(VkMemoryPriorityAllocateInfoEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value,
                           "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
  {
    using Type = MemoryPriorityAllocateInfoEXT;
  };

  struct MemoryRequirements
  {
    using NativeType = VkMemoryRequirements;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {},
                                            uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
      : size(size_), alignment(alignment_), memoryTypeBits(memoryTypeBits_)
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryRequirements(MemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements(VkMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT
      : MemoryRequirements(*reinterpret_cast<MemoryRequirements const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryRequirements &operator=(MemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements &operator=(VkMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>(&rhs);
      return *this;
    }

    explicit operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements *>(this);
    }

    explicit operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(size, alignment, memoryTypeBits);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(MemoryRequirements const &) const = default;
#else
    bool operator==(MemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (size == rhs.size) && (alignment == rhs.alignment) && (memoryTypeBits == rhs.memoryTypeBits);
#  endif
    }

    bool operator!=(MemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
    uint32_t memoryTypeBits = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryRequirements) == sizeof(VkMemoryRequirements),
                           "struct and wrapper have different size!");
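  // Illustrative usage sketch (editorial addition, not generated from the XML registry): the
  // memoryTypeBits mask of a MemoryRequirements is typically tested against the propertyFlags of
  // each MemoryType reported by the physical device when choosing a memoryTypeIndex for an
  // allocation. Assumes the usual enhanced-mode accessors; findMemoryTypeIndex is a hypothetical
  // helper name:
  //
  //   uint32_t findMemoryTypeIndex(VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice,
  //                                VULKAN_HPP_NAMESPACE::MemoryRequirements const &requirements,
  //                                VULKAN_HPP_NAMESPACE::MemoryPropertyFlags wanted)
  //   {
  //     VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties props = physicalDevice.getMemoryProperties();
  //     for (uint32_t i = 0; i < props.memoryTypeCount; ++i)
  //     {
  //       // bit i of memoryTypeBits set means "memory type i is allowed for this resource"
  //       if ((requirements.memoryTypeBits & (1u << i)) && ((props.memoryTypes[i].propertyFlags & wanted) == wanted))
  //       {
  //         return i;
  //       }
  //     }
  //     throw std::runtime_error("no suitable memory type found");
  //   }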
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryRequirements is not nothrow_move_constructible!"); struct MemoryRequirements2 { using NativeType = VkMemoryRequirements2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryRequirements(memoryRequirements_) { } VULKAN_HPP_CONSTEXPR MemoryRequirements2(MemoryRequirements2 const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryRequirements2(VkMemoryRequirements2 const &rhs) VULKAN_HPP_NOEXCEPT : MemoryRequirements2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryRequirements2 &operator=(MemoryRequirements2 const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryRequirements2 &operator=(VkMemoryRequirements2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryRequirements); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryRequirements2 const &) const = default; #else bool operator==(MemoryRequirements2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryRequirements == rhs.memoryRequirements); # endif } bool operator!=(MemoryRequirements2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; void *pNext = {}; VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryRequirements2) == sizeof(VkMemoryRequirements2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryRequirements2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryRequirements2; }; using MemoryRequirements2KHR = MemoryRequirements2; struct MemoryType { using NativeType = VkMemoryType; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT : propertyFlags(propertyFlags_), heapIndex(heapIndex_) { } VULKAN_HPP_CONSTEXPR MemoryType(MemoryType const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryType(VkMemoryType const &rhs) VULKAN_HPP_NOEXCEPT : MemoryType(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryType &operator=(MemoryType const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryType &operator=(VkMemoryType const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkMemoryType 
  const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryType *>(this);
    }

    explicit operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryType *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(propertyFlags, heapIndex);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(MemoryType const &) const = default;
#else
    bool operator==(MemoryType const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (propertyFlags == rhs.propertyFlags) && (heapIndex == rhs.heapIndex);
#  endif
    }

    bool operator!=(MemoryType const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
    uint32_t heapIndex = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryType) == sizeof(VkMemoryType), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value,
                           "MemoryType is not nothrow_move_constructible!");

#if defined(VK_USE_PLATFORM_WIN32_KHR)
  struct MemoryWin32HandlePropertiesKHR
  {
    using NativeType = VkMemoryWin32HandlePropertiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;

#  if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), memoryTypeBits(memoryTypeBits_)
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(MemoryWin32HandlePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    MemoryWin32HandlePropertiesKHR(VkMemoryWin32HandlePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT
      : MemoryWin32HandlePropertiesKHR(*reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>(&rhs))
    {
    }
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryWin32HandlePropertiesKHR &operator=(MemoryWin32HandlePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    MemoryWin32HandlePropertiesKHR &operator=(VkMemoryWin32HandlePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>(&rhs);
      return *this;
    }

    explicit operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>(this);
    }

    explicit operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>(this);
    }

#  if defined(VULKAN_HPP_USE_REFLECT)
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, memoryTypeBits);
    }
#  endif

#  if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(MemoryWin32HandlePropertiesKHR const &) const = default;
#  else
    bool operator==(MemoryWin32HandlePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#    if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#    else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryTypeBits == rhs.memoryTypeBits);
#    endif
    }

    bool operator!=(MemoryWin32HandlePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
    void *pNext = {};
    uint32_t memoryTypeBits = {};
  };
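  // Illustrative usage sketch (editorial addition, not generated from the XML registry): this
  // structure is the output of querying which memory types can import a particular Win32 handle.
  // Assumes the enhanced-mode member function on VULKAN_HPP_NAMESPACE::Device is available and
  // that 'device' and 'win32Handle' are valid objects provided by the application:
  //
  //   VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR handleProps =
  //     device.getMemoryWin32HandlePropertiesKHR(
  //       VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32, win32Handle);
  //   // handleProps.memoryTypeBits then restricts the memoryTypeIndex usable when importing the handle.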
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR) == sizeof(VkMemoryWin32HandlePropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryWin32HandlePropertiesKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct MemoryZirconHandlePropertiesFUCHSIA { using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA(uint32_t memoryTypeBits_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryTypeBits(memoryTypeBits_) { } VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA(MemoryZirconHandlePropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryZirconHandlePropertiesFUCHSIA(VkMemoryZirconHandlePropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : MemoryZirconHandlePropertiesFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MemoryZirconHandlePropertiesFUCHSIA &operator=(MemoryZirconHandlePropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; MemoryZirconHandlePropertiesFUCHSIA &operator=(VkMemoryZirconHandlePropertiesFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryTypeBits); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MemoryZirconHandlePropertiesFUCHSIA const &) const = default; # else bool operator==(MemoryZirconHandlePropertiesFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryTypeBits == rhs.memoryTypeBits); # endif } bool operator!=(MemoryZirconHandlePropertiesFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; void *pNext = {}; uint32_t memoryTypeBits = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA) == sizeof(VkMemoryZirconHandlePropertiesFUCHSIA), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = MemoryZirconHandlePropertiesFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined(VK_USE_PLATFORM_METAL_EXT) struct MetalSurfaceCreateInfoEXT { using NativeType = VkMetalSurfaceCreateInfoEXT; static const bool allowDuplicate 
= false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer *pLayer_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pLayer(pLayer_) { } VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(MetalSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MetalSurfaceCreateInfoEXT(VkMetalSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : MetalSurfaceCreateInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MetalSurfaceCreateInfoEXT &operator=(MetalSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MetalSurfaceCreateInfoEXT &operator=(VkMetalSurfaceCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT &setPLayer(const CAMetalLayer *pLayer_) VULKAN_HPP_NOEXCEPT { pLayer = pLayer_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pLayer); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MetalSurfaceCreateInfoEXT const &) const = default; # else bool operator==(MetalSurfaceCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pLayer == rhs.pLayer); # endif } bool operator!=(MetalSurfaceCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; const CAMetalLayer *pLayer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT) == sizeof(VkMetalSurfaceCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = MetalSurfaceCreateInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ struct MultiDrawIndexedInfoEXT { using NativeType = VkMultiDrawIndexedInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT(uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {}) VULKAN_HPP_NOEXCEPT : firstIndex(firstIndex_), indexCount(indexCount_), vertexOffset(vertexOffset_) { } VULKAN_HPP_CONSTEXPR 
MultiDrawIndexedInfoEXT(MultiDrawIndexedInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MultiDrawIndexedInfoEXT(VkMultiDrawIndexedInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : MultiDrawIndexedInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MultiDrawIndexedInfoEXT &operator=(MultiDrawIndexedInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MultiDrawIndexedInfoEXT &operator=(VkMultiDrawIndexedInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT &setFirstIndex(uint32_t firstIndex_) VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT &setIndexCount(uint32_t indexCount_) VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT &setVertexOffset(int32_t vertexOffset_) VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(firstIndex, indexCount, vertexOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MultiDrawIndexedInfoEXT const &) const = default; #else bool operator==(MultiDrawIndexedInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (firstIndex == rhs.firstIndex) && (indexCount == rhs.indexCount) && (vertexOffset == rhs.vertexOffset); # endif } bool operator!=(MultiDrawIndexedInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t firstIndex = {}; uint32_t indexCount = {}; int32_t vertexOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT) == sizeof(VkMultiDrawIndexedInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!"); struct MultiDrawInfoEXT { using NativeType = VkMultiDrawInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT(uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {}) VULKAN_HPP_NOEXCEPT : firstVertex(firstVertex_), vertexCount(vertexCount_) { } VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT(MultiDrawInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MultiDrawInfoEXT(VkMultiDrawInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MultiDrawInfoEXT &operator=(MultiDrawInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MultiDrawInfoEXT &operator=(VkMultiDrawInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT &setFirstVertex(uint32_t firstVertex_) VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT &setVertexCount(uint32_t vertexCount_) VULKAN_HPP_NOEXCEPT { vertexCount = 
vertexCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(firstVertex, vertexCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MultiDrawInfoEXT const &) const = default; #else bool operator==(MultiDrawInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (firstVertex == rhs.firstVertex) && (vertexCount == rhs.vertexCount); # endif } bool operator!=(MultiDrawInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t firstVertex = {}; uint32_t vertexCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT) == sizeof(VkMultiDrawInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MultiDrawInfoEXT is not nothrow_move_constructible!"); struct MultisamplePropertiesEXT { using NativeType = VkMultisamplePropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxSampleLocationGridSize(maxSampleLocationGridSize_) { } VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(MultisamplePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MultisamplePropertiesEXT(VkMultisamplePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : MultisamplePropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MultisamplePropertiesEXT &operator=(MultisamplePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; MultisamplePropertiesEXT &operator=(VkMultisamplePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxSampleLocationGridSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MultisamplePropertiesEXT const &) const = default; #else bool operator==(MultisamplePropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxSampleLocationGridSize == rhs.maxSampleLocationGridSize); # endif } bool operator!=(MultisamplePropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize 
= {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT) == sizeof(VkMultisamplePropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MultisamplePropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = MultisamplePropertiesEXT; }; struct MultiviewPerViewAttributesInfoNVX { using NativeType = VkMultiviewPerViewAttributesInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), perViewAttributes(perViewAttributes_), perViewAttributesPositionXOnly(perViewAttributesPositionXOnly_) { } VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX(MultiviewPerViewAttributesInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; MultiviewPerViewAttributesInfoNVX(VkMultiviewPerViewAttributesInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT : MultiviewPerViewAttributesInfoNVX(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MultiviewPerViewAttributesInfoNVX &operator=(MultiviewPerViewAttributesInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; MultiviewPerViewAttributesInfoNVX &operator=(VkMultiviewPerViewAttributesInfoNVX const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX &setPerViewAttributes(VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_) VULKAN_HPP_NOEXCEPT { perViewAttributes = perViewAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly(VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_) VULKAN_HPP_NOEXCEPT { perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, perViewAttributes, perViewAttributesPositionXOnly); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MultiviewPerViewAttributesInfoNVX const &) const = default; #else bool operator==(MultiviewPerViewAttributesInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (perViewAttributes == rhs.perViewAttributes) && (perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly); # endif } bool operator!=(MultiviewPerViewAttributesInfoNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: 
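    // Editorial note (not generated from the XML registry): like other extension structs in this
    // header, an instance is normally hooked into the pNext chain of the structure it extends.
    // Illustrative sketch, assuming 'renderingInfo' is some RenderingInfo being filled in:
    //
    //   VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX perViewAttributesInfo{};
    //   perViewAttributesInfo.setPerViewAttributes(VK_TRUE);
    //   renderingInfo.setPNext(&perViewAttributesInfo);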
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {}; VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX) == sizeof(VkMultiviewPerViewAttributesInfoNVX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!"); template<> struct CppType { using Type = MultiviewPerViewAttributesInfoNVX; }; struct MutableDescriptorTypeListVALVE { using NativeType = VkMutableDescriptorTypeListVALVE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(uint32_t descriptorTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorType *pDescriptorTypes_ = {}) VULKAN_HPP_NOEXCEPT : descriptorTypeCount(descriptorTypeCount_), pDescriptorTypes(pDescriptorTypes_) { } VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(MutableDescriptorTypeListVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; MutableDescriptorTypeListVALVE(VkMutableDescriptorTypeListVALVE const &rhs) VULKAN_HPP_NOEXCEPT : MutableDescriptorTypeListVALVE(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) MutableDescriptorTypeListVALVE(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &descriptorTypes_) : descriptorTypeCount(static_cast(descriptorTypes_.size())) , pDescriptorTypes(descriptorTypes_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MutableDescriptorTypeListVALVE &operator=(MutableDescriptorTypeListVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; MutableDescriptorTypeListVALVE &operator=(VkMutableDescriptorTypeListVALVE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE &setDescriptorTypeCount(uint32_t descriptorTypeCount_) VULKAN_HPP_NOEXCEPT { descriptorTypeCount = descriptorTypeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & setPDescriptorTypes(const VULKAN_HPP_NAMESPACE::DescriptorType *pDescriptorTypes_) VULKAN_HPP_NOEXCEPT { pDescriptorTypes = pDescriptorTypes_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) MutableDescriptorTypeListVALVE &setDescriptorTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &descriptorTypes_) VULKAN_HPP_NOEXCEPT { descriptorTypeCount = static_cast(descriptorTypes_.size()); pDescriptorTypes = descriptorTypes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(descriptorTypeCount, pDescriptorTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MutableDescriptorTypeListVALVE const &) const = default; #else bool operator==(MutableDescriptorTypeListVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { # 
if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (descriptorTypeCount == rhs.descriptorTypeCount) && (pDescriptorTypes == rhs.pDescriptorTypes); # endif } bool operator!=(MutableDescriptorTypeListVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t descriptorTypeCount = {}; const VULKAN_HPP_NAMESPACE::DescriptorType *pDescriptorTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE) == sizeof(VkMutableDescriptorTypeListVALVE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MutableDescriptorTypeListVALVE is not nothrow_move_constructible!"); struct MutableDescriptorTypeCreateInfoVALVE { using NativeType = VkMutableDescriptorTypeCreateInfoVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoVALVE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(uint32_t mutableDescriptorTypeListCount_ = {}, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE *pMutableDescriptorTypeLists_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), mutableDescriptorTypeListCount(mutableDescriptorTypeListCount_), pMutableDescriptorTypeLists(pMutableDescriptorTypeLists_) { } VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(MutableDescriptorTypeCreateInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; MutableDescriptorTypeCreateInfoVALVE(VkMutableDescriptorTypeCreateInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT : MutableDescriptorTypeCreateInfoVALVE(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) MutableDescriptorTypeCreateInfoVALVE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &mutableDescriptorTypeLists_, const void *pNext_ = nullptr) : pNext(pNext_) , mutableDescriptorTypeListCount(static_cast(mutableDescriptorTypeLists_.size())) , pMutableDescriptorTypeLists(mutableDescriptorTypeLists_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ MutableDescriptorTypeCreateInfoVALVE &operator=(MutableDescriptorTypeCreateInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; MutableDescriptorTypeCreateInfoVALVE &operator=(VkMutableDescriptorTypeCreateInfoVALVE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeListCount(uint32_t mutableDescriptorTypeListCount_) VULKAN_HPP_NOEXCEPT { mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists(const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE *pMutableDescriptorTypeLists_) VULKAN_HPP_NOEXCEPT { pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) MutableDescriptorTypeCreateInfoVALVE &setMutableDescriptorTypeLists( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &mutableDescriptorTypeLists_) VULKAN_HPP_NOEXCEPT 
{ mutableDescriptorTypeListCount = static_cast(mutableDescriptorTypeLists_.size()); pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(MutableDescriptorTypeCreateInfoVALVE const &) const = default; #else bool operator==(MutableDescriptorTypeCreateInfoVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount) && (pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists); # endif } bool operator!=(MutableDescriptorTypeCreateInfoVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE; const void *pNext = {}; uint32_t mutableDescriptorTypeListCount = {}; const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE *pMutableDescriptorTypeLists = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE) == sizeof(VkMutableDescriptorTypeCreateInfoVALVE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "MutableDescriptorTypeCreateInfoVALVE is not nothrow_move_constructible!"); template<> struct CppType { using Type = MutableDescriptorTypeCreateInfoVALVE; }; struct PastPresentationTimingGOOGLE { using NativeType = VkPastPresentationTimingGOOGLE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT : presentID(presentID_), desiredPresentTime(desiredPresentTime_), actualPresentTime(actualPresentTime_), earliestPresentTime(earliestPresentTime_), presentMargin(presentMargin_) { } VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(PastPresentationTimingGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; PastPresentationTimingGOOGLE(VkPastPresentationTimingGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT : PastPresentationTimingGOOGLE(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PastPresentationTimingGOOGLE &operator=(PastPresentationTimingGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; PastPresentationTimingGOOGLE &operator=(VkPastPresentationTimingGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PastPresentationTimingGOOGLE const &) const = default; #else bool operator==(PastPresentationTimingGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (presentID == rhs.presentID) && (desiredPresentTime == rhs.desiredPresentTime) && (actualPresentTime == rhs.actualPresentTime) && (earliestPresentTime == rhs.earliestPresentTime) && (presentMargin == rhs.presentMargin); # endif } bool operator!=(PastPresentationTimingGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t presentID = {}; uint64_t desiredPresentTime = {}; uint64_t actualPresentTime = {}; uint64_t earliestPresentTime = {}; uint64_t presentMargin = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE) == sizeof(VkPastPresentationTimingGOOGLE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PastPresentationTimingGOOGLE is not nothrow_move_constructible!"); struct PerformanceConfigurationAcquireInfoINTEL { using NativeType = VkPerformanceConfigurationAcquireInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), type(type_) { } VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(PerformanceConfigurationAcquireInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceConfigurationAcquireInfoINTEL(VkPerformanceConfigurationAcquireInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceConfigurationAcquireInfoINTEL(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PerformanceConfigurationAcquireInfoINTEL &operator=(PerformanceConfigurationAcquireInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceConfigurationAcquireInfoINTEL &operator=(VkPerformanceConfigurationAcquireInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # 
else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, type); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PerformanceConfigurationAcquireInfoINTEL const &) const = default; #else bool operator==(PerformanceConfigurationAcquireInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (type == rhs.type); # endif } bool operator!=(PerformanceConfigurationAcquireInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL) == sizeof(VkPerformanceConfigurationAcquireInfoINTEL), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!"); template<> struct CppType { using Type = PerformanceConfigurationAcquireInfoINTEL; }; struct PerformanceCounterDescriptionKHR { using NativeType = VkPerformanceCounterDescriptionKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array const &name_ = {}, std::array const &category_ = {}, std::array const &description_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), name(name_), category(category_), description(description_) { } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(PerformanceCounterDescriptionKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterDescriptionKHR(VkPerformanceCounterDescriptionKHR const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceCounterDescriptionKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PerformanceCounterDescriptionKHR &operator=(PerformanceCounterDescriptionKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterDescriptionKHR &operator=(VkPerformanceCounterDescriptionKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, name, category, description); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PerformanceCounterDescriptionKHR const &) const = default; #else bool operator==(PerformanceCounterDescriptionKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if 
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (name == rhs.name) && (category == rhs.category) && (description == rhs.description); # endif } bool operator!=(PerformanceCounterDescriptionKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR) == sizeof(VkPerformanceCounterDescriptionKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PerformanceCounterDescriptionKHR; }; struct PerformanceCounterKHR { using NativeType = VkPerformanceCounterKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array const &uuid_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), unit(unit_), scope(scope_), storage(storage_), uuid(uuid_) { } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(PerformanceCounterKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterKHR(VkPerformanceCounterKHR const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceCounterKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PerformanceCounterKHR &operator=(PerformanceCounterKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterKHR &operator=(VkPerformanceCounterKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, unit, scope, storage, uuid); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PerformanceCounterKHR const &) const = default; #else bool operator==(PerformanceCounterKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (unit == rhs.unit) && (scope == rhs.scope) && (storage == rhs.storage) && (uuid == rhs.uuid); # endif } bool operator!=(PerformanceCounterKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
    VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
    VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceCounterKHR) == sizeof(VkPerformanceCounterKHR), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "PerformanceCounterKHR is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
  {
    using Type = PerformanceCounterKHR;
  };

  union PerformanceCounterResultKHR
  {
    using NativeType = VkPerformanceCounterResultKHR;

#if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR(int32_t int32_ = {}) : int32(int32_) {}
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR(int64_t int64_) : int64(int64_) {}
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR(uint32_t uint32_) : uint32(uint32_) {}
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR(uint64_t uint64_) : uint64(uint64_) {}
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR(float float32_) : float32(float32_) {}
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR(double float64_) : float64(float64_) {}
#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/

#if !defined(VULKAN_HPP_NO_UNION_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR &setInt32(int32_t int32_) VULKAN_HPP_NOEXCEPT { int32 = int32_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR &setInt64(int64_t int64_) VULKAN_HPP_NOEXCEPT { int64 = int64_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR &setUint32(uint32_t uint32_) VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR &setUint64(uint64_t uint64_) VULKAN_HPP_NOEXCEPT { uint64 = uint64_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR &setFloat32(float float32_) VULKAN_HPP_NOEXCEPT { float32 = float32_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR &setFloat64(double float64_) VULKAN_HPP_NOEXCEPT { float64 = float64_; return *this; }
#endif /*VULKAN_HPP_NO_UNION_SETTERS*/

    operator VkPerformanceCounterResultKHR const &() const { return *reinterpret_cast<const VkPerformanceCounterResultKHR *>(this); }
    operator VkPerformanceCounterResultKHR &() { return *reinterpret_cast<VkPerformanceCounterResultKHR *>(this); }

    int32_t int32;
    int64_t int64;
    uint32_t uint32;
    uint64_t uint64;
    float float32;
    double float64;
  };

  struct PerformanceMarkerInfoINTEL
  {
    using NativeType = VkPerformanceMarkerInfoINTEL;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), marker(marker_) {}

    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(PerformanceMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PerformanceMarkerInfoINTEL(VkPerformanceMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT :
      PerformanceMarkerInfoINTEL(*reinterpret_cast<PerformanceMarkerInfoINTEL const *>(&rhs)) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PerformanceMarkerInfoINTEL &operator=(PerformanceMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PerformanceMarkerInfoINTEL &operator=(VkPerformanceMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL &setMarker(uint64_t marker_) VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>(this); }

    explicit operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, marker);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PerformanceMarkerInfoINTEL const &) const = default;
#else
    bool operator==(PerformanceMarkerInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (marker == rhs.marker);
# endif
    }

    bool operator!=(PerformanceMarkerInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
    const void *pNext = {};
    uint64_t marker = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL) == sizeof(VkPerformanceMarkerInfoINTEL), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
  {
    using Type = PerformanceMarkerInfoINTEL;
  };

  struct PerformanceOverrideInfoINTEL
  {
    using NativeType = VkPerformanceOverrideInfoINTEL;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), type(type_), enable(enable_), parameter(parameter_)
    {
    }

    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(PerformanceOverrideInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PerformanceOverrideInfoINTEL(VkPerformanceOverrideInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceOverrideInfoINTEL(*reinterpret_cast<PerformanceOverrideInfoINTEL const *>(&rhs)) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PerformanceOverrideInfoINTEL &operator=(PerformanceOverrideInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PerformanceOverrideInfoINTEL &operator=(VkPerformanceOverrideInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
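    // Illustrative sketch, not part of the generated registry output: the chainable setters guarded by
    // this block follow the usual vulkan.hpp builder pattern, so a PerformanceOverrideInfoINTEL can be
    // filled in a single expression. The command buffer and call site below are assumptions for the
    // example only (they require VK_INTEL_performance_query to be enabled on the device):
    //
    //   auto overrideInfo = vk::PerformanceOverrideInfoINTEL{}
    //                         .setType(vk::PerformanceOverrideTypeINTEL::eNullHardware)
    //                         .setEnable(VK_TRUE)
    //                         .setParameter(0);
    //   commandBuffer.setPerformanceOverrideINTEL(overrideInfo);  // assumed call site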
VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &setType(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &setEnable(VULKAN_HPP_NAMESPACE::Bool32 enable_) VULKAN_HPP_NOEXCEPT { enable = enable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &setParameter(uint64_t parameter_) VULKAN_HPP_NOEXCEPT { parameter = parameter_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, type, enable, parameter); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PerformanceOverrideInfoINTEL const &) const = default; #else bool operator==(PerformanceOverrideInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (type == rhs.type) && (enable == rhs.enable) && (parameter == rhs.parameter); # endif } bool operator!=(PerformanceOverrideInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; VULKAN_HPP_NAMESPACE::Bool32 enable = {}; uint64_t parameter = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL) == sizeof(VkPerformanceOverrideInfoINTEL), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!"); template<> struct CppType { using Type = PerformanceOverrideInfoINTEL; }; struct PerformanceQuerySubmitInfoKHR { using NativeType = VkPerformanceQuerySubmitInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), counterPassIndex(counterPassIndex_) { } VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(PerformanceQuerySubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceQuerySubmitInfoKHR(VkPerformanceQuerySubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceQuerySubmitInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PerformanceQuerySubmitInfoKHR &operator=(PerformanceQuerySubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceQuerySubmitInfoKHR &operator=(VkPerformanceQuerySubmitInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR &setCounterPassIndex(uint32_t counterPassIndex_) VULKAN_HPP_NOEXCEPT { counterPassIndex = counterPassIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, counterPassIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PerformanceQuerySubmitInfoKHR const &) const = default; #else bool operator==(PerformanceQuerySubmitInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (counterPassIndex == rhs.counterPassIndex); # endif } bool operator!=(PerformanceQuerySubmitInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; const void *pNext = {}; uint32_t counterPassIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR) == sizeof(VkPerformanceQuerySubmitInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PerformanceQuerySubmitInfoKHR; }; struct PerformanceStreamMarkerInfoINTEL { using NativeType = VkPerformanceStreamMarkerInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), marker(marker_) { } VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(PerformanceStreamMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceStreamMarkerInfoINTEL(VkPerformanceStreamMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceStreamMarkerInfoINTEL(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PerformanceStreamMarkerInfoINTEL &operator=(PerformanceStreamMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceStreamMarkerInfoINTEL &operator=(VkPerformanceStreamMarkerInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL &setMarker(uint32_t marker_) VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } explicit operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, marker); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PerformanceStreamMarkerInfoINTEL const &) const = default; #else bool operator==(PerformanceStreamMarkerInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (marker == rhs.marker); # endif } bool operator!=(PerformanceStreamMarkerInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; const void *pNext = {}; uint32_t marker = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL) == sizeof(VkPerformanceStreamMarkerInfoINTEL), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!"); template<> struct CppType { using Type = PerformanceStreamMarkerInfoINTEL; }; union PerformanceValueDataINTEL { using NativeType = VkPerformanceValueDataINTEL; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL(uint32_t value32_ = {}) : value32(value32_) {} VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL(uint64_t value64_) : value64(value64_) {} VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL(float valueFloat_) : valueFloat(valueFloat_) {} VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL(const char *valueString_) : valueString(valueString_) {} #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL &setValue32(uint32_t value32_) VULKAN_HPP_NOEXCEPT { value32 = value32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL &setValue64(uint64_t value64_) VULKAN_HPP_NOEXCEPT { value64 = value64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL &setValueFloat(float valueFloat_) VULKAN_HPP_NOEXCEPT { valueFloat = valueFloat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL &setValueBool(VULKAN_HPP_NAMESPACE::Bool32 valueBool_) VULKAN_HPP_NOEXCEPT { valueBool = valueBool_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL &setValueString(const char *valueString_) VULKAN_HPP_NOEXCEPT { valueString = valueString_; return *this; } #endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkPerformanceValueDataINTEL const &() const { return *reinterpret_cast(this); } operator VkPerformanceValueDataINTEL &() { return *reinterpret_cast(this); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS uint32_t value32; uint64_t value64; float valueFloat; VULKAN_HPP_NAMESPACE::Bool32 valueBool; const char *valueString; #else uint32_t value32; uint64_t value64; float valueFloat; VkBool32 valueBool; const char *valueString; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct PerformanceValueINTEL { using NativeType = VkPerformanceValueINTEL; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 
PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT : type(type_), data(data_) { } VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL(PerformanceValueINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceValueINTEL(VkPerformanceValueINTEL const &rhs) VULKAN_HPP_NOEXCEPT : PerformanceValueINTEL(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PerformanceValueINTEL &operator=(PerformanceValueINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; PerformanceValueINTEL &operator=(VkPerformanceValueINTEL const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL &setType(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL &setData(VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &data_) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(type, data); } #endif public: VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PerformanceValueINTEL) == sizeof(VkPerformanceValueINTEL), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PerformanceValueINTEL is not nothrow_move_constructible!"); struct PhysicalDevice16BitStorageFeatures { using NativeType = VkPhysicalDevice16BitStorageFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), storageBuffer16BitAccess(storageBuffer16BitAccess_), uniformAndStorageBuffer16BitAccess(uniformAndStorageBuffer16BitAccess_), storagePushConstant16(storagePushConstant16_), storageInputOutput16(storageInputOutput16_) { } VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(PhysicalDevice16BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice16BitStorageFeatures(VkPhysicalDevice16BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevice16BitStorageFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevice16BitStorageFeatures &operator=(PhysicalDevice16BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice16BitStorageFeatures 
&operator=(VkPhysicalDevice16BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_) VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess(VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16(VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_) VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16(VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_) VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevice16BitStorageFeatures const &) const = default; #else bool operator==(PhysicalDevice16BitStorageFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (storageBuffer16BitAccess == rhs.storageBuffer16BitAccess) && (uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess) && (storagePushConstant16 == rhs.storagePushConstant16) && (storageInputOutput16 == rhs.storageInputOutput16); # endif } bool operator!=(PhysicalDevice16BitStorageFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures) == sizeof(VkPhysicalDevice16BitStorageFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevice16BitStorageFeatures; }; using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; struct 
PhysicalDevice4444FormatsFeaturesEXT { using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), formatA4R4G4B4(formatA4R4G4B4_), formatA4B4G4R4(formatA4B4G4R4_) { } VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(PhysicalDevice4444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice4444FormatsFeaturesEXT(VkPhysicalDevice4444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevice4444FormatsFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevice4444FormatsFeaturesEXT &operator=(PhysicalDevice4444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice4444FormatsFeaturesEXT &operator=(VkPhysicalDevice4444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &setFormatA4R4G4B4(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_) VULKAN_HPP_NOEXCEPT { formatA4R4G4B4 = formatA4R4G4B4_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &setFormatA4B4G4R4(VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_) VULKAN_HPP_NOEXCEPT { formatA4B4G4R4 = formatA4B4G4R4_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, formatA4R4G4B4, formatA4B4G4R4); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevice4444FormatsFeaturesEXT const &) const = default; #else bool operator==(PhysicalDevice4444FormatsFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (formatA4R4G4B4 == rhs.formatA4R4G4B4) && (formatA4B4G4R4 == rhs.formatA4B4G4R4); # endif } bool operator!=(PhysicalDevice4444FormatsFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT) == sizeof(VkPhysicalDevice4444FormatsFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevice4444FormatsFeaturesEXT is not 
nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevice4444FormatsFeaturesEXT; }; struct PhysicalDevice8BitStorageFeatures { using NativeType = VkPhysicalDevice8BitStorageFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), storageBuffer8BitAccess(storageBuffer8BitAccess_), uniformAndStorageBuffer8BitAccess(uniformAndStorageBuffer8BitAccess_), storagePushConstant8(storagePushConstant8_) { } VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(PhysicalDevice8BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice8BitStorageFeatures(VkPhysicalDevice8BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevice8BitStorageFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevice8BitStorageFeatures &operator=(PhysicalDevice8BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice8BitStorageFeatures &operator=(VkPhysicalDevice8BitStorageFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_) VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess(VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8(VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_) VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevice8BitStorageFeatures const &) const = default; #else bool operator==(PhysicalDevice8BitStorageFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (storageBuffer8BitAccess == rhs.storageBuffer8BitAccess) && (uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess) && (storagePushConstant8 == rhs.storagePushConstant8); # endif } bool 
operator!=(PhysicalDevice8BitStorageFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures) == sizeof(VkPhysicalDevice8BitStorageFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevice8BitStorageFeatures; }; using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; struct PhysicalDeviceASTCDecodeFeaturesEXT { using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), decodeModeSharedExponent(decodeModeSharedExponent_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(PhysicalDeviceASTCDecodeFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceASTCDecodeFeaturesEXT(VkPhysicalDeviceASTCDecodeFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceASTCDecodeFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceASTCDecodeFeaturesEXT &operator=(PhysicalDeviceASTCDecodeFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceASTCDecodeFeaturesEXT &operator=(VkPhysicalDeviceASTCDecodeFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_) VULKAN_HPP_NOEXCEPT { decodeModeSharedExponent = decodeModeSharedExponent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, decodeModeSharedExponent); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceASTCDecodeFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceASTCDecodeFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (decodeModeSharedExponent == rhs.decodeModeSharedExponent); # endif } bool 
operator!=(PhysicalDeviceASTCDecodeFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT) == sizeof(VkPhysicalDeviceASTCDecodeFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceASTCDecodeFeaturesEXT; }; struct PhysicalDeviceAccelerationStructureFeaturesKHR { using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), accelerationStructure(accelerationStructure_), accelerationStructureCaptureReplay(accelerationStructureCaptureReplay_), accelerationStructureIndirectBuild(accelerationStructureIndirectBuild_), accelerationStructureHostCommands(accelerationStructureHostCommands_), descriptorBindingAccelerationStructureUpdateAfterBind(descriptorBindingAccelerationStructureUpdateAfterBind_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(PhysicalDeviceAccelerationStructureFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAccelerationStructureFeaturesKHR(VkPhysicalDeviceAccelerationStructureFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAccelerationStructureFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceAccelerationStructureFeaturesKHR &operator=(PhysicalDeviceAccelerationStructureFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAccelerationStructureFeaturesKHR &operator=(VkPhysicalDeviceAccelerationStructureFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_) VULKAN_HPP_NOEXCEPT { accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_) VULKAN_HPP_NOEXCEPT { accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_) VULKAN_HPP_NOEXCEPT { accelerationStructureHostCommands = accelerationStructureHostCommands_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &setDescriptorBindingAccelerationStructureUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, accelerationStructure, accelerationStructureCaptureReplay, accelerationStructureIndirectBuild, accelerationStructureHostCommands, descriptorBindingAccelerationStructureUpdateAfterBind); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceAccelerationStructureFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceAccelerationStructureFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (accelerationStructure == rhs.accelerationStructure) && (accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay) && (accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild) && (accelerationStructureHostCommands == rhs.accelerationStructureHostCommands) && (descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind); # endif } bool operator!=(PhysicalDeviceAccelerationStructureFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {}; VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {}; VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR) == sizeof(VkPhysicalDeviceAccelerationStructureFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = 
PhysicalDeviceAccelerationStructureFeaturesKHR; }; struct PhysicalDeviceAccelerationStructurePropertiesKHR { using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxGeometryCount(maxGeometryCount_), maxInstanceCount(maxInstanceCount_), maxPrimitiveCount(maxPrimitiveCount_), maxPerStageDescriptorAccelerationStructures(maxPerStageDescriptorAccelerationStructures_), maxPerStageDescriptorUpdateAfterBindAccelerationStructures(maxPerStageDescriptorUpdateAfterBindAccelerationStructures_), maxDescriptorSetAccelerationStructures(maxDescriptorSetAccelerationStructures_), maxDescriptorSetUpdateAfterBindAccelerationStructures(maxDescriptorSetUpdateAfterBindAccelerationStructures_), minAccelerationStructureScratchOffsetAlignment(minAccelerationStructureScratchOffsetAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(PhysicalDeviceAccelerationStructurePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAccelerationStructurePropertiesKHR(VkPhysicalDeviceAccelerationStructurePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAccelerationStructurePropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceAccelerationStructurePropertiesKHR &operator=(PhysicalDeviceAccelerationStructurePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAccelerationStructurePropertiesKHR &operator=(VkPhysicalDeviceAccelerationStructurePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxGeometryCount, maxInstanceCount, maxPrimitiveCount, maxPerStageDescriptorAccelerationStructures, maxPerStageDescriptorUpdateAfterBindAccelerationStructures, maxDescriptorSetAccelerationStructures, maxDescriptorSetUpdateAfterBindAccelerationStructures, minAccelerationStructureScratchOffsetAlignment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceAccelerationStructurePropertiesKHR const &) const = default; #else bool operator==(PhysicalDeviceAccelerationStructurePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxGeometryCount == rhs.maxGeometryCount) && (maxInstanceCount == 
rhs.maxInstanceCount) && (maxPrimitiveCount == rhs.maxPrimitiveCount) && (maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures) && (maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures) && (maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures) && (maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures) && (minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment); # endif } bool operator!=(PhysicalDeviceAccelerationStructurePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; void *pNext = {}; uint64_t maxGeometryCount = {}; uint64_t maxInstanceCount = {}; uint64_t maxPrimitiveCount = {}; uint32_t maxPerStageDescriptorAccelerationStructures = {}; uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; uint32_t maxDescriptorSetAccelerationStructures = {}; uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; uint32_t minAccelerationStructureScratchOffsetAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR) == sizeof(VkPhysicalDeviceAccelerationStructurePropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; }; struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT { using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), advancedBlendCoherentOperations(advancedBlendCoherentOperations_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(PhysicalDeviceBlendOperationAdvancedFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBlendOperationAdvancedFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceBlendOperationAdvancedFeaturesEXT &operator=(PhysicalDeviceBlendOperationAdvancedFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBlendOperationAdvancedFeaturesEXT &operator=(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & 
setAdvancedBlendCoherentOperations(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_) VULKAN_HPP_NOEXCEPT { advancedBlendCoherentOperations = advancedBlendCoherentOperations_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, advancedBlendCoherentOperations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceBlendOperationAdvancedFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceBlendOperationAdvancedFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations); # endif } bool operator!=(PhysicalDeviceBlendOperationAdvancedFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT) == sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; }; struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT { using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), advancedBlendMaxColorAttachments(advancedBlendMaxColorAttachments_), advancedBlendIndependentBlend(advancedBlendIndependentBlend_), advancedBlendNonPremultipliedSrcColor(advancedBlendNonPremultipliedSrcColor_), advancedBlendNonPremultipliedDstColor(advancedBlendNonPremultipliedDstColor_), advancedBlendCorrelatedOverlap(advancedBlendCorrelatedOverlap_), advancedBlendAllOperations(advancedBlendAllOperations_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(PhysicalDeviceBlendOperationAdvancedPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = 
default; PhysicalDeviceBlendOperationAdvancedPropertiesEXT(VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBlendOperationAdvancedPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=(PhysicalDeviceBlendOperationAdvancedPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBlendOperationAdvancedPropertiesEXT &operator=(VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, advancedBlendMaxColorAttachments, advancedBlendIndependentBlend, advancedBlendNonPremultipliedSrcColor, advancedBlendNonPremultipliedDstColor, advancedBlendCorrelatedOverlap, advancedBlendAllOperations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceBlendOperationAdvancedPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceBlendOperationAdvancedPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments) && (advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend) && (advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor) && (advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor) && (advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap) && (advancedBlendAllOperations == rhs.advancedBlendAllOperations); # endif } bool operator!=(PhysicalDeviceBlendOperationAdvancedPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; void *pNext = {}; uint32_t advancedBlendMaxColorAttachments = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT) == sizeof(VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; }; struct PhysicalDeviceBorderColorSwizzleFeaturesEXT { using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), borderColorSwizzle(borderColorSwizzle_), borderColorSwizzleFromImage(borderColorSwizzleFromImage_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT(PhysicalDeviceBorderColorSwizzleFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBorderColorSwizzleFeaturesEXT(VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBorderColorSwizzleFeaturesEXT(*reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceBorderColorSwizzleFeaturesEXT &operator=(PhysicalDeviceBorderColorSwizzleFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBorderColorSwizzleFeaturesEXT &operator=(VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &setBorderColorSwizzle(VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_) VULKAN_HPP_NOEXCEPT { borderColorSwizzle = borderColorSwizzle_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &setBorderColorSwizzleFromImage(VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_) VULKAN_HPP_NOEXCEPT { borderColorSwizzleFromImage = borderColorSwizzleFromImage_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>(this); }

    explicit operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceBorderColorSwizzleFeaturesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceBorderColorSwizzleFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (borderColorSwizzle == rhs.borderColorSwizzle) && (borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage);
# endif
    }

    bool operator!=(PhysicalDeviceBorderColorSwizzleFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {};
    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT) == sizeof(VkPhysicalDeviceBorderColorSwizzleFeaturesEXT), "struct and wrapper have different size!");
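  // Illustrative sketch, not part of the generated registry output: feature structs such as
  // PhysicalDeviceBorderColorSwizzleFeaturesEXT are usually read back through the pNext chain of
  // PhysicalDeviceFeatures2; `physicalDevice` below is an assumed, already-created vk::PhysicalDevice:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceBorderColorSwizzleFeaturesEXT>();
  //   bool swizzleSupported =
  //     chain.get<vk::PhysicalDeviceBorderColorSwizzleFeaturesEXT>().borderColorSwizzle == VK_TRUE;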
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; }; struct PhysicalDeviceBufferDeviceAddressFeatures { using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), bufferDeviceAddress(bufferDeviceAddress_), bufferDeviceAddressCaptureReplay(bufferDeviceAddressCaptureReplay_), bufferDeviceAddressMultiDevice(bufferDeviceAddressMultiDevice_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(PhysicalDeviceBufferDeviceAddressFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeatures(VkPhysicalDeviceBufferDeviceAddressFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBufferDeviceAddressFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceBufferDeviceAddressFeatures &operator=(PhysicalDeviceBufferDeviceAddressFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeatures &operator=(VkPhysicalDeviceBufferDeviceAddressFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceBufferDeviceAddressFeatures const &) const = default; #else bool 
operator==(PhysicalDeviceBufferDeviceAddressFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (bufferDeviceAddress == rhs.bufferDeviceAddress) && (bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay) && (bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice); # endif } bool operator!=(PhysicalDeviceBufferDeviceAddressFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures) == sizeof(VkPhysicalDeviceBufferDeviceAddressFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceBufferDeviceAddressFeatures; }; using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; struct PhysicalDeviceBufferDeviceAddressFeaturesEXT { using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), bufferDeviceAddress(bufferDeviceAddress_), bufferDeviceAddressCaptureReplay(bufferDeviceAddressCaptureReplay_), bufferDeviceAddressMultiDevice(bufferDeviceAddressMultiDevice_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(PhysicalDeviceBufferDeviceAddressFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeaturesEXT(VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBufferDeviceAddressFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceBufferDeviceAddressFeaturesEXT &operator=(PhysicalDeviceBufferDeviceAddressFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeaturesEXT &operator=(VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceBufferDeviceAddressFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceBufferDeviceAddressFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (bufferDeviceAddress == rhs.bufferDeviceAddress) && (bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay) && (bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice); # endif } bool operator!=(PhysicalDeviceBufferDeviceAddressFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT) == sizeof(VkPhysicalDeviceBufferDeviceAddressFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; }; using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; struct PhysicalDeviceCoherentMemoryFeaturesAMD { using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceCoherentMemory(deviceCoherentMemory_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(PhysicalDeviceCoherentMemoryFeaturesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; 
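    // The constructor and operator= taking the C struct (below) reinterpret a
    // VkPhysicalDeviceCoherentMemoryFeaturesAMD as the C++ wrapper; the VULKAN_HPP_STATIC_ASSERTs
    // emitted after the struct check that both types have the same size and are standard layout,
    // which is what this byte-wise reinterpretation relies on.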
    PhysicalDeviceCoherentMemoryFeaturesAMD(VkPhysicalDeviceCoherentMemoryFeaturesAMD const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoherentMemoryFeaturesAMD(*reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCoherentMemoryFeaturesAMD &operator=(PhysicalDeviceCoherentMemoryFeaturesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoherentMemoryFeaturesAMD &operator=(VkPhysicalDeviceCoherentMemoryFeaturesAMD const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD &setDeviceCoherentMemory(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_) VULKAN_HPP_NOEXCEPT
    {
      deviceCoherentMemory = deviceCoherentMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(this);
    }

    explicit operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, deviceCoherentMemory);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceCoherentMemoryFeaturesAMD const &) const = default;
#else
    bool operator==(PhysicalDeviceCoherentMemoryFeaturesAMD const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceCoherentMemory == rhs.deviceCoherentMemory);
# endif
    }

    bool operator!=(PhysicalDeviceCoherentMemoryFeaturesAMD const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD) == sizeof(VkPhysicalDeviceCoherentMemoryFeaturesAMD),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
                           "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
  {
    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
  };

  struct PhysicalDeviceColorWriteEnableFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), colorWriteEnable(colorWriteEnable_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(PhysicalDeviceColorWriteEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceColorWriteEnableFeaturesEXT(VkPhysicalDeviceColorWriteEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT :
PhysicalDeviceColorWriteEnableFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceColorWriteEnableFeaturesEXT &operator=(PhysicalDeviceColorWriteEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceColorWriteEnableFeaturesEXT &operator=(VkPhysicalDeviceColorWriteEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_) VULKAN_HPP_NOEXCEPT { colorWriteEnable = colorWriteEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, colorWriteEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceColorWriteEnableFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceColorWriteEnableFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (colorWriteEnable == rhs.colorWriteEnable); # endif } bool operator!=(PhysicalDeviceColorWriteEnableFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT) == sizeof(VkPhysicalDeviceColorWriteEnableFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; }; struct PhysicalDeviceComputeShaderDerivativesFeaturesNV { using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), computeDerivativeGroupQuads(computeDerivativeGroupQuads_), computeDerivativeGroupLinear(computeDerivativeGroupLinear_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(PhysicalDeviceComputeShaderDerivativesFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; 
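    // Setter sketch (illustrative only): unless VULKAN_HPP_NO_STRUCT_SETTERS is defined, the
    // setters declared further below allow the struct to be filled fluently, e.g. before chaining
    // it into a DeviceCreateInfo:
    //
    //   auto derivativeFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV{}
    //                               .setComputeDerivativeGroupQuads(VK_TRUE)
    //                               .setComputeDerivativeGroupLinear(VK_TRUE);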
PhysicalDeviceComputeShaderDerivativesFeaturesNV(VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceComputeShaderDerivativesFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceComputeShaderDerivativesFeaturesNV &operator=(PhysicalDeviceComputeShaderDerivativesFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceComputeShaderDerivativesFeaturesNV &operator=(VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_) VULKAN_HPP_NOEXCEPT { computeDerivativeGroupQuads = computeDerivativeGroupQuads_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_) VULKAN_HPP_NOEXCEPT { computeDerivativeGroupLinear = computeDerivativeGroupLinear_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceComputeShaderDerivativesFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceComputeShaderDerivativesFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads) && (computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear); # endif } bool operator!=(PhysicalDeviceComputeShaderDerivativesFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV) == sizeof(VkPhysicalDeviceComputeShaderDerivativesFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; }; struct PhysicalDeviceConditionalRenderingFeaturesEXT { using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), conditionalRendering(conditionalRendering_), inheritedConditionalRendering(inheritedConditionalRendering_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(PhysicalDeviceConditionalRenderingFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConditionalRenderingFeaturesEXT(VkPhysicalDeviceConditionalRenderingFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceConditionalRenderingFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceConditionalRenderingFeaturesEXT &operator=(PhysicalDeviceConditionalRenderingFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConditionalRenderingFeaturesEXT &operator=(VkPhysicalDeviceConditionalRenderingFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_) VULKAN_HPP_NOEXCEPT { conditionalRendering = conditionalRendering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering(VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_) VULKAN_HPP_NOEXCEPT { inheritedConditionalRendering = inheritedConditionalRendering_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, conditionalRendering, inheritedConditionalRendering); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceConditionalRenderingFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceConditionalRenderingFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (conditionalRendering == rhs.conditionalRendering) && (inheritedConditionalRendering == rhs.inheritedConditionalRendering); # endif } bool operator!=(PhysicalDeviceConditionalRenderingFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT) == 
sizeof(VkPhysicalDeviceConditionalRenderingFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; }; struct PhysicalDeviceConservativeRasterizationPropertiesEXT { using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), primitiveOverestimationSize(primitiveOverestimationSize_), maxExtraPrimitiveOverestimationSize(maxExtraPrimitiveOverestimationSize_), extraPrimitiveOverestimationSizeGranularity(extraPrimitiveOverestimationSizeGranularity_), primitiveUnderestimation(primitiveUnderestimation_), conservativePointAndLineRasterization(conservativePointAndLineRasterization_), degenerateTrianglesRasterized(degenerateTrianglesRasterized_), degenerateLinesRasterized(degenerateLinesRasterized_), fullyCoveredFragmentShaderInputVariable(fullyCoveredFragmentShaderInputVariable_), conservativeRasterizationPostDepthCoverage(conservativeRasterizationPostDepthCoverage_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(PhysicalDeviceConservativeRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConservativeRasterizationPropertiesEXT(VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceConservativeRasterizationPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=(PhysicalDeviceConservativeRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConservativeRasterizationPropertiesEXT &operator=(VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, primitiveOverestimationSize, maxExtraPrimitiveOverestimationSize, extraPrimitiveOverestimationSizeGranularity, primitiveUnderestimation, 
conservativePointAndLineRasterization, degenerateTrianglesRasterized, degenerateLinesRasterized, fullyCoveredFragmentShaderInputVariable, conservativeRasterizationPostDepthCoverage); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceConservativeRasterizationPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceConservativeRasterizationPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (primitiveOverestimationSize == rhs.primitiveOverestimationSize) && (maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize) && (extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity) && (primitiveUnderestimation == rhs.primitiveUnderestimation) && (conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization) && (degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized) && (degenerateLinesRasterized == rhs.degenerateLinesRasterized) && (fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable) && (conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage); # endif } bool operator!=(PhysicalDeviceConservativeRasterizationPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; void *pNext = {}; float primitiveOverestimationSize = {}; float maxExtraPrimitiveOverestimationSize = {}; float extraPrimitiveOverestimationSizeGranularity = {}; VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT) == sizeof(VkPhysicalDeviceConservativeRasterizationPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; }; struct PhysicalDeviceCooperativeMatrixFeaturesNV { using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), cooperativeMatrix(cooperativeMatrix_), cooperativeMatrixRobustBufferAccess(cooperativeMatrixRobustBufferAccess_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(PhysicalDeviceCooperativeMatrixFeaturesNV 
const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixFeaturesNV(VkPhysicalDeviceCooperativeMatrixFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixFeaturesNV &operator=(PhysicalDeviceCooperativeMatrixFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixFeaturesNV &operator=(VkPhysicalDeviceCooperativeMatrixFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_) VULKAN_HPP_NOEXCEPT { cooperativeMatrix = cooperativeMatrix_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_) VULKAN_HPP_NOEXCEPT { cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceCooperativeMatrixFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceCooperativeMatrixFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (cooperativeMatrix == rhs.cooperativeMatrix) && (cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess); # endif } bool operator!=(PhysicalDeviceCooperativeMatrixFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV) == sizeof(VkPhysicalDeviceCooperativeMatrixFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; }; struct PhysicalDeviceCooperativeMatrixPropertiesNV { using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), cooperativeMatrixSupportedStages(cooperativeMatrixSupportedStages_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(PhysicalDeviceCooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixPropertiesNV(VkPhysicalDeviceCooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixPropertiesNV &operator=(PhysicalDeviceCooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixPropertiesNV &operator=(VkPhysicalDeviceCooperativeMatrixPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, cooperativeMatrixSupportedStages); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceCooperativeMatrixPropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceCooperativeMatrixPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages); # endif } bool operator!=(PhysicalDeviceCooperativeMatrixPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV) == sizeof(VkPhysicalDeviceCooperativeMatrixPropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; }; struct PhysicalDeviceCornerSampledImageFeaturesNV { using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), cornerSampledImage(cornerSampledImage_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(PhysicalDeviceCornerSampledImageFeaturesNV const &rhs) 
    VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCornerSampledImageFeaturesNV(VkPhysicalDeviceCornerSampledImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCornerSampledImageFeaturesNV(*reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceCornerSampledImageFeaturesNV &operator=(PhysicalDeviceCornerSampledImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCornerSampledImageFeaturesNV &operator=(VkPhysicalDeviceCornerSampledImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV &setCornerSampledImage(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_) VULKAN_HPP_NOEXCEPT
    {
      cornerSampledImage = cornerSampledImage_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>(this);
    }

    explicit operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
    reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, cornerSampledImage);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceCornerSampledImageFeaturesNV const &) const = default;
#else
    bool operator==(PhysicalDeviceCornerSampledImageFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (cornerSampledImage == rhs.cornerSampledImage);
# endif
    }

    bool operator!=(PhysicalDeviceCornerSampledImageFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV) == sizeof(VkPhysicalDeviceCornerSampledImageFeaturesNV),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
                           "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
  {
    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
  };

  struct PhysicalDeviceCoverageReductionModeFeaturesNV
  {
    using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), coverageReductionMode(coverageReductionMode_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(PhysicalDeviceCoverageReductionModeFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;
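    // Enable sketch (illustrative only; deviceCreateInfo and its queue setup are assumed to exist
    // elsewhere): feature structs are enabled by chaining them into DeviceCreateInfo::pNext before
    // the device is created:
    //
    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV coverageFeatures(VK_TRUE);
    //   deviceCreateInfo.pNext = &coverageFeatures;   // request the feature for the new device
    //   // ... then create the device from this createInfo, e.g. physicalDevice.createDevice(deviceCreateInfo)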
PhysicalDeviceCoverageReductionModeFeaturesNV(VkPhysicalDeviceCoverageReductionModeFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCoverageReductionModeFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceCoverageReductionModeFeaturesNV &operator=(PhysicalDeviceCoverageReductionModeFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCoverageReductionModeFeaturesNV &operator=(VkPhysicalDeviceCoverageReductionModeFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_) VULKAN_HPP_NOEXCEPT { coverageReductionMode = coverageReductionMode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, coverageReductionMode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceCoverageReductionModeFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceCoverageReductionModeFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (coverageReductionMode == rhs.coverageReductionMode); # endif } bool operator!=(PhysicalDeviceCoverageReductionModeFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV) == sizeof(VkPhysicalDeviceCoverageReductionModeFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; }; struct PhysicalDeviceCustomBorderColorFeaturesEXT { using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), customBorderColors(customBorderColors_), customBorderColorWithoutFormat(customBorderColorWithoutFormat_) { } VULKAN_HPP_CONSTEXPR 
PhysicalDeviceCustomBorderColorFeaturesEXT(PhysicalDeviceCustomBorderColorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomBorderColorFeaturesEXT(VkPhysicalDeviceCustomBorderColorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCustomBorderColorFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceCustomBorderColorFeaturesEXT &operator=(PhysicalDeviceCustomBorderColorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomBorderColorFeaturesEXT &operator=(VkPhysicalDeviceCustomBorderColorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_) VULKAN_HPP_NOEXCEPT { customBorderColors = customBorderColors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat(VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_) VULKAN_HPP_NOEXCEPT { customBorderColorWithoutFormat = customBorderColorWithoutFormat_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, customBorderColors, customBorderColorWithoutFormat); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceCustomBorderColorFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceCustomBorderColorFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (customBorderColors == rhs.customBorderColors) && (customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat); # endif } bool operator!=(PhysicalDeviceCustomBorderColorFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT) == sizeof(VkPhysicalDeviceCustomBorderColorFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; }; struct PhysicalDeviceCustomBorderColorPropertiesEXT { using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxCustomBorderColorSamplers(maxCustomBorderColorSamplers_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(PhysicalDeviceCustomBorderColorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomBorderColorPropertiesEXT(VkPhysicalDeviceCustomBorderColorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCustomBorderColorPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceCustomBorderColorPropertiesEXT &operator=(PhysicalDeviceCustomBorderColorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomBorderColorPropertiesEXT &operator=(VkPhysicalDeviceCustomBorderColorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxCustomBorderColorSamplers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceCustomBorderColorPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceCustomBorderColorPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers); # endif } bool operator!=(PhysicalDeviceCustomBorderColorPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; void *pNext = {}; uint32_t maxCustomBorderColorSamplers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT) == sizeof(VkPhysicalDeviceCustomBorderColorPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; }; struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dedicatedAllocationImageAliasing(dedicatedAllocationImageAliasing_) { } 
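    // Note on comparisons (this pattern is shared by the feature structs in this header): with
    // VULKAN_HPP_HAS_SPACESHIP_OPERATOR the defaulted operator<=> is used; otherwise operator==
    // either compares the std::tie-based reflect() tuples (when VULKAN_HPP_USE_REFLECT is defined)
    // or falls back to explicit member-by-member comparison.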
VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=(PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=(VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_) VULKAN_HPP_NOEXCEPT { dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dedicatedAllocationImageAliasing); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing); # endif } bool operator!=(PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV) == sizeof(VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; }; struct PhysicalDeviceDepthClipControlFeaturesEXT { using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), depthClipControl(depthClipControl_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT(PhysicalDeviceDepthClipControlFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipControlFeaturesEXT(VkPhysicalDeviceDepthClipControlFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClipControlFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDepthClipControlFeaturesEXT &operator=(PhysicalDeviceDepthClipControlFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipControlFeaturesEXT &operator=(VkPhysicalDeviceDepthClipControlFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setDepthClipControl(VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_) VULKAN_HPP_NOEXCEPT { depthClipControl = depthClipControl_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, depthClipControl); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDepthClipControlFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceDepthClipControlFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (depthClipControl == rhs.depthClipControl); # endif } bool operator!=(PhysicalDeviceDepthClipControlFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT) == sizeof(VkPhysicalDeviceDepthClipControlFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDepthClipControlFeaturesEXT; }; struct PhysicalDeviceDepthClipEnableFeaturesEXT { using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), depthClipEnable(depthClipEnable_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(PhysicalDeviceDepthClipEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipEnableFeaturesEXT(VkPhysicalDeviceDepthClipEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClipEnableFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDepthClipEnableFeaturesEXT &operator=(PhysicalDeviceDepthClipEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipEnableFeaturesEXT &operator=(VkPhysicalDeviceDepthClipEnableFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT &setDepthClipEnable(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_) VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, depthClipEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDepthClipEnableFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceDepthClipEnableFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (depthClipEnable == rhs.depthClipEnable); # endif } bool operator!=(PhysicalDeviceDepthClipEnableFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT) == sizeof(VkPhysicalDeviceDepthClipEnableFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; }; struct PhysicalDeviceDepthStencilResolveProperties { using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags 
supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), supportedDepthResolveModes(supportedDepthResolveModes_), supportedStencilResolveModes(supportedStencilResolveModes_), independentResolveNone(independentResolveNone_), independentResolve(independentResolve_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(PhysicalDeviceDepthStencilResolveProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthStencilResolveProperties(VkPhysicalDeviceDepthStencilResolveProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthStencilResolveProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDepthStencilResolveProperties &operator=(PhysicalDeviceDepthStencilResolveProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthStencilResolveProperties &operator=(VkPhysicalDeviceDepthStencilResolveProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDepthStencilResolveProperties const &) const = default; #else bool operator==(PhysicalDeviceDepthStencilResolveProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (supportedDepthResolveModes == rhs.supportedDepthResolveModes) && (supportedStencilResolveModes == rhs.supportedStencilResolveModes) && (independentResolveNone == rhs.independentResolveNone) && (independentResolve == rhs.independentResolve); # endif } bool operator!=(PhysicalDeviceDepthStencilResolveProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties) == sizeof(VkPhysicalDeviceDepthStencilResolveProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDepthStencilResolveProperties; }; using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; struct PhysicalDeviceDescriptorIndexingFeatures { using NativeType = 
VkPhysicalDeviceDescriptorIndexingFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderInputAttachmentArrayDynamicIndexing(shaderInputAttachmentArrayDynamicIndexing_), shaderUniformTexelBufferArrayDynamicIndexing(shaderUniformTexelBufferArrayDynamicIndexing_), shaderStorageTexelBufferArrayDynamicIndexing(shaderStorageTexelBufferArrayDynamicIndexing_), shaderUniformBufferArrayNonUniformIndexing(shaderUniformBufferArrayNonUniformIndexing_), shaderSampledImageArrayNonUniformIndexing(shaderSampledImageArrayNonUniformIndexing_), shaderStorageBufferArrayNonUniformIndexing(shaderStorageBufferArrayNonUniformIndexing_), shaderStorageImageArrayNonUniformIndexing(shaderStorageImageArrayNonUniformIndexing_), shaderInputAttachmentArrayNonUniformIndexing(shaderInputAttachmentArrayNonUniformIndexing_), shaderUniformTexelBufferArrayNonUniformIndexing(shaderUniformTexelBufferArrayNonUniformIndexing_), shaderStorageTexelBufferArrayNonUniformIndexing(shaderStorageTexelBufferArrayNonUniformIndexing_), descriptorBindingUniformBufferUpdateAfterBind(descriptorBindingUniformBufferUpdateAfterBind_), descriptorBindingSampledImageUpdateAfterBind(descriptorBindingSampledImageUpdateAfterBind_), descriptorBindingStorageImageUpdateAfterBind(descriptorBindingStorageImageUpdateAfterBind_), descriptorBindingStorageBufferUpdateAfterBind(descriptorBindingStorageBufferUpdateAfterBind_), descriptorBindingUniformTexelBufferUpdateAfterBind(descriptorBindingUniformTexelBufferUpdateAfterBind_), descriptorBindingStorageTexelBufferUpdateAfterBind(descriptorBindingStorageTexelBufferUpdateAfterBind_), 
descriptorBindingUpdateUnusedWhilePending(descriptorBindingUpdateUnusedWhilePending_), descriptorBindingPartiallyBound(descriptorBindingPartiallyBound_), descriptorBindingVariableDescriptorCount(descriptorBindingVariableDescriptorCount_), runtimeDescriptorArray(runtimeDescriptorArray_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(PhysicalDeviceDescriptorIndexingFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorIndexingFeatures(VkPhysicalDeviceDescriptorIndexingFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorIndexingFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDescriptorIndexingFeatures &operator=(PhysicalDeviceDescriptorIndexingFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorIndexingFeatures &operator=(VkPhysicalDeviceDescriptorIndexingFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 
shaderInputAttachmentArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_) VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_) VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & 
setDescriptorBindingVariableDescriptorCount(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_) VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray(VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_) VULKAN_HPP_NOEXCEPT { runtimeDescriptorArray = runtimeDescriptorArray_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDescriptorIndexingFeatures const &) const = default; #else bool operator==(PhysicalDeviceDescriptorIndexingFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing) && (shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing) && (shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing) && (shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing) && (shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing) && (shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing) && (shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing) && (shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing) && (shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing) && (shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing) && (descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind) && (descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind) && (descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind) && 
(descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind) && (descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind) && (descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind) && (descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending) && (descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound) && (descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount) && (runtimeDescriptorArray == rhs.runtimeDescriptorArray); # endif } bool operator!=(PhysicalDeviceDescriptorIndexingFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures) == sizeof(VkPhysicalDeviceDescriptorIndexingFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDescriptorIndexingFeatures; }; using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; struct PhysicalDeviceDescriptorIndexingProperties { using NativeType = VkPhysicalDeviceDescriptorIndexingProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t 
maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxUpdateAfterBindDescriptorsInAllPools(maxUpdateAfterBindDescriptorsInAllPools_), shaderUniformBufferArrayNonUniformIndexingNative(shaderUniformBufferArrayNonUniformIndexingNative_), shaderSampledImageArrayNonUniformIndexingNative(shaderSampledImageArrayNonUniformIndexingNative_), shaderStorageBufferArrayNonUniformIndexingNative(shaderStorageBufferArrayNonUniformIndexingNative_), shaderStorageImageArrayNonUniformIndexingNative(shaderStorageImageArrayNonUniformIndexingNative_), shaderInputAttachmentArrayNonUniformIndexingNative(shaderInputAttachmentArrayNonUniformIndexingNative_), robustBufferAccessUpdateAfterBind(robustBufferAccessUpdateAfterBind_), quadDivergentImplicitLod(quadDivergentImplicitLod_), maxPerStageDescriptorUpdateAfterBindSamplers(maxPerStageDescriptorUpdateAfterBindSamplers_), maxPerStageDescriptorUpdateAfterBindUniformBuffers(maxPerStageDescriptorUpdateAfterBindUniformBuffers_), maxPerStageDescriptorUpdateAfterBindStorageBuffers(maxPerStageDescriptorUpdateAfterBindStorageBuffers_), maxPerStageDescriptorUpdateAfterBindSampledImages(maxPerStageDescriptorUpdateAfterBindSampledImages_), maxPerStageDescriptorUpdateAfterBindStorageImages(maxPerStageDescriptorUpdateAfterBindStorageImages_), maxPerStageDescriptorUpdateAfterBindInputAttachments(maxPerStageDescriptorUpdateAfterBindInputAttachments_), maxPerStageUpdateAfterBindResources(maxPerStageUpdateAfterBindResources_), maxDescriptorSetUpdateAfterBindSamplers(maxDescriptorSetUpdateAfterBindSamplers_), maxDescriptorSetUpdateAfterBindUniformBuffers(maxDescriptorSetUpdateAfterBindUniformBuffers_), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic(maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_), maxDescriptorSetUpdateAfterBindStorageBuffers(maxDescriptorSetUpdateAfterBindStorageBuffers_), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic(maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_), 
maxDescriptorSetUpdateAfterBindSampledImages(maxDescriptorSetUpdateAfterBindSampledImages_), maxDescriptorSetUpdateAfterBindStorageImages(maxDescriptorSetUpdateAfterBindStorageImages_), maxDescriptorSetUpdateAfterBindInputAttachments(maxDescriptorSetUpdateAfterBindInputAttachments_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(PhysicalDeviceDescriptorIndexingProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorIndexingProperties(VkPhysicalDeviceDescriptorIndexingProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorIndexingProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDescriptorIndexingProperties &operator=(PhysicalDeviceDescriptorIndexingProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorIndexingProperties &operator=(VkPhysicalDeviceDescriptorIndexingProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDescriptorIndexingProperties const &) const = default; #else bool operator==(PhysicalDeviceDescriptorIndexingProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools) && (shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative) && (shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative) && (shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative) && (shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative) && (shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative) && 
(robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind) && (quadDivergentImplicitLod == rhs.quadDivergentImplicitLod) && (maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers) && (maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers) && (maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers) && (maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages) && (maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages) && (maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments) && (maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources) && (maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers) && (maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers) && (maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) && (maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers) && (maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) && (maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages) && (maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages) && (maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments); # endif } bool operator!=(PhysicalDeviceDescriptorIndexingProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; void *pNext = {}; uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; uint32_t maxPerStageUpdateAfterBindResources = {}; uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = 
{}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties) == sizeof(VkPhysicalDeviceDescriptorIndexingProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDescriptorIndexingProperties; }; using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), descriptorSetHostMapping(descriptorSetHostMapping_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=(PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &operator=(VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setDescriptorSetHostMapping(VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_) VULKAN_HPP_NOEXCEPT { descriptorSetHostMapping = descriptorSetHostMapping_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, descriptorSetHostMapping); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &) const = default; #else bool operator==(PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (descriptorSetHostMapping == rhs.descriptorSetHostMapping); # endif } bool operator!=(PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE 
const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE) == sizeof(VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; }; struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceGeneratedCommands(deviceGeneratedCommands_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &operator=(PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &operator=(VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_) VULKAN_HPP_NOEXCEPT { deviceGeneratedCommands = deviceGeneratedCommands_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceGeneratedCommands); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(deviceGeneratedCommands == rhs.deviceGeneratedCommands); # endif } bool operator!=(PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV) == sizeof(VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; }; struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxGraphicsShaderGroupCount(maxGraphicsShaderGroupCount_), maxIndirectSequenceCount(maxIndirectSequenceCount_), maxIndirectCommandsTokenCount(maxIndirectCommandsTokenCount_), maxIndirectCommandsStreamCount(maxIndirectCommandsStreamCount_), maxIndirectCommandsTokenOffset(maxIndirectCommandsTokenOffset_), maxIndirectCommandsStreamStride(maxIndirectCommandsStreamStride_), minSequencesCountBufferOffsetAlignment(minSequencesCountBufferOffsetAlignment_), minSequencesIndexBufferOffsetAlignment(minSequencesIndexBufferOffsetAlignment_), minIndirectCommandsBufferOffsetAlignment(minIndirectCommandsBufferOffsetAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=(PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsPropertiesNV &operator=(VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxGraphicsShaderGroupCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsStreamCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsStreamStride, minSequencesCountBufferOffsetAlignment, minSequencesIndexBufferOffsetAlignment, minIndirectCommandsBufferOffsetAlignment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount) && (maxIndirectSequenceCount == rhs.maxIndirectSequenceCount) && (maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount) && (maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount) && (maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset) && (maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride) && (minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment) && (minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment) && (minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment); # endif } bool operator!=(PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; void *pNext = {}; uint32_t maxGraphicsShaderGroupCount = {}; uint32_t maxIndirectSequenceCount = {}; uint32_t maxIndirectCommandsTokenCount = {}; uint32_t maxIndirectCommandsStreamCount = {}; uint32_t maxIndirectCommandsTokenOffset = {}; uint32_t maxIndirectCommandsStreamStride = {}; uint32_t minSequencesCountBufferOffsetAlignment = {}; uint32_t minSequencesIndexBufferOffsetAlignment = {}; uint32_t minIndirectCommandsBufferOffsetAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV) == sizeof(VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; }; struct PhysicalDeviceDeviceMemoryReportFeaturesEXT { using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceMemoryReport(deviceMemoryReport_) { } VULKAN_HPP_CONSTEXPR 
PhysicalDeviceDeviceMemoryReportFeaturesEXT(PhysicalDeviceDeviceMemoryReportFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceMemoryReportFeaturesEXT(VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceMemoryReportFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDeviceMemoryReportFeaturesEXT &operator=(PhysicalDeviceDeviceMemoryReportFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceMemoryReportFeaturesEXT &operator=(VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_) VULKAN_HPP_NOEXCEPT { deviceMemoryReport = deviceMemoryReport_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceMemoryReport); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDeviceMemoryReportFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceDeviceMemoryReportFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceMemoryReport == rhs.deviceMemoryReport); # endif } bool operator!=(PhysicalDeviceDeviceMemoryReportFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT) == sizeof(VkPhysicalDeviceDeviceMemoryReportFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; }; struct PhysicalDeviceDiagnosticsConfigFeaturesNV { using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), diagnosticsConfig(diagnosticsConfig_) { } VULKAN_HPP_CONSTEXPR 
PhysicalDeviceDiagnosticsConfigFeaturesNV(PhysicalDeviceDiagnosticsConfigFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDiagnosticsConfigFeaturesNV(VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDiagnosticsConfigFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDiagnosticsConfigFeaturesNV &operator=(PhysicalDeviceDiagnosticsConfigFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDiagnosticsConfigFeaturesNV &operator=(VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_) VULKAN_HPP_NOEXCEPT { diagnosticsConfig = diagnosticsConfig_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, diagnosticsConfig); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDiagnosticsConfigFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceDiagnosticsConfigFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (diagnosticsConfig == rhs.diagnosticsConfig); # endif } bool operator!=(PhysicalDeviceDiagnosticsConfigFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV) == sizeof(VkPhysicalDeviceDiagnosticsConfigFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; }; struct PhysicalDeviceDiscardRectanglePropertiesEXT { using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxDiscardRectangles(maxDiscardRectangles_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(PhysicalDeviceDiscardRectanglePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = 
default;

    PhysicalDeviceDiscardRectanglePropertiesEXT(VkPhysicalDeviceDiscardRectanglePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiscardRectanglePropertiesEXT(*reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDiscardRectanglePropertiesEXT &operator=(PhysicalDeviceDiscardRectanglePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiscardRectanglePropertiesEXT &operator=(VkPhysicalDeviceDiscardRectanglePropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs);
      return *this;
    }

    explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(this);
    }

    explicit operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, maxDiscardRectangles);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceDiscardRectanglePropertiesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceDiscardRectanglePropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxDiscardRectangles == rhs.maxDiscardRectangles);
# endif
    }

    bool operator!=(PhysicalDeviceDiscardRectanglePropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
    void *pNext = {};
    uint32_t maxDiscardRectangles = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT) == sizeof(VkPhysicalDeviceDiscardRectanglePropertiesEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
                           "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
  {
    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
  };

  struct PhysicalDeviceDriverProperties
  {
    using NativeType = VkPhysicalDeviceDriverProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
                                                           std::array<char, VK_MAX_DRIVER_NAME_SIZE> const &driverName_ = {},
                                                           std::array<char, VK_MAX_DRIVER_INFO_SIZE> const &driverInfo_ = {},
                                                           VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
                                                           void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , driverID(driverID_)
      , driverName(driverName_)
      , driverInfo(driverInfo_)
      , conformanceVersion(conformanceVersion_)
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(PhysicalDeviceDriverProperties const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDriverProperties(VkPhysicalDeviceDriverProperties const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDriverProperties(*reinterpret_cast<PhysicalDeviceDriverProperties const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDriverProperties &operator=(PhysicalDeviceDriverProperties const &rhs) VULKAN_HPP_NOEXCEPT = default;
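    // Illustrative usage sketch (comment only, not generated from the registry; assumes the standard
    // vulkan.hpp StructureChain query via PhysicalDevice::getProperties2 and the default vk namespace):
    //
    //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceDriverProperties>();
    //   auto const &driverProps = chain.get<vk::PhysicalDeviceDriverProperties>();
    //   // driverProps.driverName and driverProps.driverInfo are null-terminated char arrays.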
    PhysicalDeviceDriverProperties &operator=(VkPhysicalDeviceDriverProperties const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>(&rhs);
      return *this;
    }

    explicit operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>(this);
    }

    explicit operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               void * const &,
               VULKAN_HPP_NAMESPACE::DriverId const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
               VULKAN_HPP_NAMESPACE::ConformanceVersion const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, driverID, driverName, driverInfo, conformanceVersion);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceDriverProperties const &) const = default;
#else
    bool operator==(PhysicalDeviceDriverProperties const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (driverID == rhs.driverID) && (driverName == rhs.driverName) &&
             (driverInfo == rhs.driverInfo) && (conformanceVersion == rhs.conformanceVersion);
#  endif
    }

    bool operator!=(PhysicalDeviceDriverProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties) == sizeof(VkPhysicalDeviceDriverProperties),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value,
                           "PhysicalDeviceDriverProperties is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
  {
    using Type = PhysicalDeviceDriverProperties;
  };

  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;

  struct PhysicalDeviceDrmPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceDrmPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {},
                                                        VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {},
                                                        int64_t primaryMajor_ = {},
                                                        int64_t primaryMinor_ = {},
                                                        int64_t renderMajor_ = {},
                                                        int64_t renderMinor_ = {},
                                                        void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , hasPrimary(hasPrimary_)
      , hasRender(hasRender_)
      , primaryMajor(primaryMajor_)
      , primaryMinor(primaryMinor_)
      , renderMajor(renderMajor_)
      , renderMinor(renderMinor_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT(PhysicalDeviceDrmPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDrmPropertiesEXT(VkPhysicalDeviceDrmPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDrmPropertiesEXT(*reinterpret_cast<PhysicalDeviceDrmPropertiesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceDrmPropertiesEXT &operator=(PhysicalDeviceDrmPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDrmPropertiesEXT &operator=(VkPhysicalDeviceDrmPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDrmPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceDrmPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (hasPrimary == rhs.hasPrimary) && (hasRender == rhs.hasRender) && (primaryMajor == rhs.primaryMajor) && (primaryMinor == rhs.primaryMinor) && (renderMajor == rhs.renderMajor) && (renderMinor == rhs.renderMinor); # endif } bool operator!=(PhysicalDeviceDrmPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {}; VULKAN_HPP_NAMESPACE::Bool32 hasRender = {}; int64_t primaryMajor = {}; int64_t primaryMinor = {}; int64_t renderMajor = {}; int64_t renderMinor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT) == sizeof(VkPhysicalDeviceDrmPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDrmPropertiesEXT; }; struct PhysicalDeviceDynamicRenderingFeatures { using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures(VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dynamicRendering(dynamicRendering_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures(PhysicalDeviceDynamicRenderingFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDynamicRenderingFeatures(VkPhysicalDeviceDynamicRenderingFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDynamicRenderingFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceDynamicRenderingFeatures &operator=(PhysicalDeviceDynamicRenderingFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDynamicRenderingFeatures &operator=(VkPhysicalDeviceDynamicRenderingFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures &setDynamicRendering(VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_) VULKAN_HPP_NOEXCEPT { dynamicRendering = dynamicRendering_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dynamicRendering); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceDynamicRenderingFeatures const &) const = default; #else bool operator==(PhysicalDeviceDynamicRenderingFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dynamicRendering == rhs.dynamicRendering); # endif } bool operator!=(PhysicalDeviceDynamicRenderingFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures) == sizeof(VkPhysicalDeviceDynamicRenderingFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceDynamicRenderingFeatures; }; using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; struct PhysicalDeviceExclusiveScissorFeaturesNV { using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), exclusiveScissor(exclusiveScissor_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(PhysicalDeviceExclusiveScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExclusiveScissorFeaturesNV(VkPhysicalDeviceExclusiveScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExclusiveScissorFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExclusiveScissorFeaturesNV &operator=(PhysicalDeviceExclusiveScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExclusiveScissorFeaturesNV &operator=(VkPhysicalDeviceExclusiveScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV 
&setExclusiveScissor(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_) VULKAN_HPP_NOEXCEPT { exclusiveScissor = exclusiveScissor_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, exclusiveScissor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExclusiveScissorFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceExclusiveScissorFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (exclusiveScissor == rhs.exclusiveScissor); # endif } bool operator!=(PhysicalDeviceExclusiveScissorFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV) == sizeof(VkPhysicalDeviceExclusiveScissorFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExclusiveScissorFeaturesNV; }; struct PhysicalDeviceExtendedDynamicState2FeaturesEXT { using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), extendedDynamicState2(extendedDynamicState2_), extendedDynamicState2LogicOp(extendedDynamicState2LogicOp_), extendedDynamicState2PatchControlPoints(extendedDynamicState2PatchControlPoints_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(PhysicalDeviceExtendedDynamicState2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicState2FeaturesEXT(VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState2FeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExtendedDynamicState2FeaturesEXT &operator=(PhysicalDeviceExtendedDynamicState2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicState2FeaturesEXT &operator=(VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_) VULKAN_HPP_NOEXCEPT { extendedDynamicState2 = extendedDynamicState2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2LogicOp(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_) VULKAN_HPP_NOEXCEPT { extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_) VULKAN_HPP_NOEXCEPT { extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExtendedDynamicState2FeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceExtendedDynamicState2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (extendedDynamicState2 == rhs.extendedDynamicState2) && (extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp) && (extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints); # endif } bool operator!=(PhysicalDeviceExtendedDynamicState2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT) == sizeof(VkPhysicalDeviceExtendedDynamicState2FeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; }; struct PhysicalDeviceExtendedDynamicStateFeaturesEXT { using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), extendedDynamicState(extendedDynamicState_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(PhysicalDeviceExtendedDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicStateFeaturesEXT(VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicStateFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExtendedDynamicStateFeaturesEXT &operator=(PhysicalDeviceExtendedDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicStateFeaturesEXT &operator=(VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_) VULKAN_HPP_NOEXCEPT { extendedDynamicState = extendedDynamicState_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, extendedDynamicState); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExtendedDynamicStateFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceExtendedDynamicStateFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (extendedDynamicState == rhs.extendedDynamicState); # endif } bool operator!=(PhysicalDeviceExtendedDynamicStateFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT) == sizeof(VkPhysicalDeviceExtendedDynamicStateFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; }; struct PhysicalDeviceExternalBufferInfo { using NativeType = VkPhysicalDeviceExternalBufferInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(
      VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
      VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
      const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), flags(flags_), usage(usage_), handleType(handleType_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(PhysicalDeviceExternalBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalBufferInfo(VkPhysicalDeviceExternalBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalBufferInfo(*reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceExternalBufferInfo &operator=(PhysicalDeviceExternalBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalBufferInfo &operator=(VkPhysicalDeviceExternalBufferInfo const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &setFlags(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &setUsage(VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>(this);
    }

    explicit operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::BufferCreateFlags const &,
               VULKAN_HPP_NAMESPACE::BufferUsageFlags const &,
               VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, flags, usage, handleType);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceExternalBufferInfo const &) const = default;
#else
    bool operator==(PhysicalDeviceExternalBufferInfo const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (usage == rhs.usage) && (handleType == rhs.handleType);
#  endif
    }

    bool operator!=(PhysicalDeviceExternalBufferInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo) == sizeof(VkPhysicalDeviceExternalBufferInfo),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value,
                           "struct wrapper is not a standard layout!");
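  // Editorial usage sketch (assumption-labelled, not part of the generated header). PhysicalDeviceExternalBufferInfo
  // is the input to PhysicalDevice::getExternalBufferProperties, which reports whether a buffer with the given
  // usage can be exported or imported through the chosen handle type. `physicalDevice` is assumed to be a valid
  // vk::PhysicalDevice on Vulkan 1.1+.
  //
  //   vk::PhysicalDeviceExternalBufferInfo info;
  //   info.setUsage(vk::BufferUsageFlagBits::eTransferSrc)
  //       .setHandleType(vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd);
  //   vk::ExternalBufferProperties extProps = physicalDevice.getExternalBufferProperties(info);
  //   bool exportable = static_cast<bool>(extProps.externalMemoryProperties.externalMemoryFeatures &
  //                                       vk::ExternalMemoryFeatureFlagBits::eExportable);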
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExternalBufferInfo; }; using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; struct PhysicalDeviceExternalFenceInfo { using NativeType = VkPhysicalDeviceExternalFenceInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(PhysicalDeviceExternalFenceInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalFenceInfo(VkPhysicalDeviceExternalFenceInfo const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalFenceInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExternalFenceInfo &operator=(PhysicalDeviceExternalFenceInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalFenceInfo &operator=(VkPhysicalDeviceExternalFenceInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExternalFenceInfo const &) const = default; #else bool operator==(PhysicalDeviceExternalFenceInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType); # endif } bool operator!=(PhysicalDeviceExternalFenceInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo) == sizeof(VkPhysicalDeviceExternalFenceInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!"); template<> 
struct CppType { using Type = PhysicalDeviceExternalFenceInfo; }; using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; struct PhysicalDeviceExternalImageFormatInfo { using NativeType = VkPhysicalDeviceExternalImageFormatInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(PhysicalDeviceExternalImageFormatInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalImageFormatInfo(VkPhysicalDeviceExternalImageFormatInfo const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalImageFormatInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExternalImageFormatInfo &operator=(PhysicalDeviceExternalImageFormatInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalImageFormatInfo &operator=(VkPhysicalDeviceExternalImageFormatInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setHandleType(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExternalImageFormatInfo const &) const = default; #else bool operator==(PhysicalDeviceExternalImageFormatInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType); # endif } bool operator!=(PhysicalDeviceExternalImageFormatInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo) == sizeof(VkPhysicalDeviceExternalImageFormatInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!"); template<> 
struct CppType { using Type = PhysicalDeviceExternalImageFormatInfo; }; using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; struct PhysicalDeviceExternalMemoryHostPropertiesEXT { using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minImportedHostPointerAlignment(minImportedHostPointerAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(PhysicalDeviceExternalMemoryHostPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryHostPropertiesEXT(VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalMemoryHostPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExternalMemoryHostPropertiesEXT &operator=(PhysicalDeviceExternalMemoryHostPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryHostPropertiesEXT &operator=(VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minImportedHostPointerAlignment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExternalMemoryHostPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceExternalMemoryHostPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment); # endif } bool operator!=(PhysicalDeviceExternalMemoryHostPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT) == sizeof(VkPhysicalDeviceExternalMemoryHostPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; }; struct PhysicalDeviceExternalMemoryRDMAFeaturesNV { using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), externalMemoryRDMA(externalMemoryRDMA_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV(PhysicalDeviceExternalMemoryRDMAFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryRDMAFeaturesNV(VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalMemoryRDMAFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExternalMemoryRDMAFeaturesNV &operator=(PhysicalDeviceExternalMemoryRDMAFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryRDMAFeaturesNV &operator=(VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setExternalMemoryRDMA(VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_) VULKAN_HPP_NOEXCEPT { externalMemoryRDMA = externalMemoryRDMA_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, externalMemoryRDMA); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExternalMemoryRDMAFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceExternalMemoryRDMAFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (externalMemoryRDMA == rhs.externalMemoryRDMA); # endif } bool operator!=(PhysicalDeviceExternalMemoryRDMAFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV) == sizeof(VkPhysicalDeviceExternalMemoryRDMAFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; }; struct PhysicalDeviceExternalSemaphoreInfo { using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(PhysicalDeviceExternalSemaphoreInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalSemaphoreInfo(VkPhysicalDeviceExternalSemaphoreInfo const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalSemaphoreInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceExternalSemaphoreInfo &operator=(PhysicalDeviceExternalSemaphoreInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalSemaphoreInfo &operator=(VkPhysicalDeviceExternalSemaphoreInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceExternalSemaphoreInfo const &) const = default; #else bool operator==(PhysicalDeviceExternalSemaphoreInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (handleType == rhs.handleType); # endif } bool operator!=(PhysicalDeviceExternalSemaphoreInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo) == sizeof(VkPhysicalDeviceExternalSemaphoreInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceExternalSemaphoreInfo; }; using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; struct PhysicalDeviceFeatures2 { using NativeType = VkPhysicalDeviceFeatures2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) 
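    // Editorial usage sketch (not generated; a comment only). PhysicalDeviceFeatures2 serves two roles:
    // querying supported features via PhysicalDevice::getFeatures2, and enabling features at device creation
    // by chaining it into DeviceCreateInfo::pNext (in which case DeviceCreateInfo::pEnabledFeatures must stay
    // nullptr). `physicalDevice` is assumed to be a valid vk::PhysicalDevice.
    //
    //   vk::PhysicalDeviceDynamicRenderingFeatures dynamicRendering(VK_TRUE);
    //   vk::PhysicalDeviceFeatures2                features2;
    //   features2.features.samplerAnisotropy = VK_TRUE;
    //   features2.pNext = &dynamicRendering;          // chain further feature structs here
    //   vk::DeviceCreateInfo createInfo;
    //   createInfo.setPNext(&features2);              // queue create infos etc. omitted
    //   vk::Device device = physicalDevice.createDevice(createInfo);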
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), features(features_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(PhysicalDeviceFeatures2 const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFeatures2(VkPhysicalDeviceFeatures2 const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFeatures2(*reinterpret_cast<PhysicalDeviceFeatures2 const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceFeatures2 &operator=(PhysicalDeviceFeatures2 const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFeatures2 &operator=(VkPhysicalDeviceFeatures2 const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 &setFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &features_) VULKAN_HPP_NOEXCEPT
    {
      features = features_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(this);
    }

    explicit operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFeatures2 *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, features);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceFeatures2 const &) const = default;
#else
    bool operator==(PhysicalDeviceFeatures2 const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (features == rhs.features);
#  endif
    }

    bool operator!=(PhysicalDeviceFeatures2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2) == sizeof(VkPhysicalDeviceFeatures2),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value,
                           "PhysicalDeviceFeatures2 is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
  {
    using Type = PhysicalDeviceFeatures2;
  };

  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;

  struct PhysicalDeviceFloatControlsProperties
  {
    using NativeType = VkPhysicalDeviceFloatControlsProperties;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), denormBehaviorIndependence(denormBehaviorIndependence_), roundingModeIndependence(roundingModeIndependence_), shaderSignedZeroInfNanPreserveFloat16(shaderSignedZeroInfNanPreserveFloat16_), shaderSignedZeroInfNanPreserveFloat32(shaderSignedZeroInfNanPreserveFloat32_), shaderSignedZeroInfNanPreserveFloat64(shaderSignedZeroInfNanPreserveFloat64_), shaderDenormPreserveFloat16(shaderDenormPreserveFloat16_), shaderDenormPreserveFloat32(shaderDenormPreserveFloat32_), shaderDenormPreserveFloat64(shaderDenormPreserveFloat64_), shaderDenormFlushToZeroFloat16(shaderDenormFlushToZeroFloat16_), shaderDenormFlushToZeroFloat32(shaderDenormFlushToZeroFloat32_), shaderDenormFlushToZeroFloat64(shaderDenormFlushToZeroFloat64_), shaderRoundingModeRTEFloat16(shaderRoundingModeRTEFloat16_), shaderRoundingModeRTEFloat32(shaderRoundingModeRTEFloat32_), shaderRoundingModeRTEFloat64(shaderRoundingModeRTEFloat64_), shaderRoundingModeRTZFloat16(shaderRoundingModeRTZFloat16_), shaderRoundingModeRTZFloat32(shaderRoundingModeRTZFloat32_), shaderRoundingModeRTZFloat64(shaderRoundingModeRTZFloat64_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(PhysicalDeviceFloatControlsProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFloatControlsProperties(VkPhysicalDeviceFloatControlsProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFloatControlsProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFloatControlsProperties &operator=(PhysicalDeviceFloatControlsProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFloatControlsProperties &operator=(VkPhysicalDeviceFloatControlsProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, 
shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFloatControlsProperties const &) const = default; #else bool operator==(PhysicalDeviceFloatControlsProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (denormBehaviorIndependence == rhs.denormBehaviorIndependence) && (roundingModeIndependence == rhs.roundingModeIndependence) && (shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16) && (shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32) && (shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64) && (shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16) && (shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32) && (shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64) && (shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16) && (shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32) && (shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64) && (shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16) && (shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32) && (shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64) && (shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16) && (shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32) && (shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64); # endif } bool operator!=(PhysicalDeviceFloatControlsProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties) == sizeof(VkPhysicalDeviceFloatControlsProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
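  // Editorial usage sketch (not generated). Query-only property structs such as
  // PhysicalDeviceFloatControlsProperties can also be retrieved through the StructureChain helper instead of
  // wiring pNext by hand; `physicalDevice` is assumed to be a valid vk::PhysicalDevice on Vulkan 1.2+.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceFloatControlsProperties>();
  //   const auto &floatControls = chain.get<vk::PhysicalDeviceFloatControlsProperties>();
  //   bool rte16Supported = floatControls.shaderRoundingModeRTEFloat16 == VK_TRUE;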
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFloatControlsProperties; }; using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; struct PhysicalDeviceFragmentDensityMap2FeaturesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentDensityMapDeferred(fragmentDensityMapDeferred_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(PhysicalDeviceFragmentDensityMap2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMap2FeaturesEXT(VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMap2FeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMap2FeaturesEXT &operator=(PhysicalDeviceFragmentDensityMap2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMap2FeaturesEXT &operator=(VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_) VULKAN_HPP_NOEXCEPT { fragmentDensityMapDeferred = fragmentDensityMapDeferred_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentDensityMapDeferred); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentDensityMap2FeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceFragmentDensityMap2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred); # endif } bool operator!=(PhysicalDeviceFragmentDensityMap2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT) == sizeof(VkPhysicalDeviceFragmentDensityMap2FeaturesEXT), "struct and 
wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; }; struct PhysicalDeviceFragmentDensityMap2PropertiesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), subsampledLoads(subsampledLoads_), subsampledCoarseReconstructionEarlyAccess(subsampledCoarseReconstructionEarlyAccess_), maxSubsampledArrayLayers(maxSubsampledArrayLayers_), maxDescriptorSetSubsampledSamplers(maxDescriptorSetSubsampledSamplers_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(PhysicalDeviceFragmentDensityMap2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMap2PropertiesEXT(VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMap2PropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMap2PropertiesEXT &operator=(PhysicalDeviceFragmentDensityMap2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMap2PropertiesEXT &operator=(VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentDensityMap2PropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceFragmentDensityMap2PropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (subsampledLoads == rhs.subsampledLoads) && (subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess) && (maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers) && (maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers); # endif } bool operator!=(PhysicalDeviceFragmentDensityMap2PropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; uint32_t maxSubsampledArrayLayers = {}; uint32_t maxDescriptorSetSubsampledSamplers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT) == sizeof(VkPhysicalDeviceFragmentDensityMap2PropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; }; struct PhysicalDeviceFragmentDensityMapFeaturesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentDensityMap(fragmentDensityMap_), fragmentDensityMapDynamic(fragmentDensityMapDynamic_), fragmentDensityMapNonSubsampledImages(fragmentDensityMapNonSubsampledImages_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(PhysicalDeviceFragmentDensityMapFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapFeaturesEXT(VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapFeaturesEXT &operator=(PhysicalDeviceFragmentDensityMapFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapFeaturesEXT &operator=(VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_) VULKAN_HPP_NOEXCEPT { fragmentDensityMap = fragmentDensityMap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_) VULKAN_HPP_NOEXCEPT { fragmentDensityMapDynamic = fragmentDensityMapDynamic_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_) VULKAN_HPP_NOEXCEPT { fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit 
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentDensityMapFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceFragmentDensityMapFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentDensityMap == rhs.fragmentDensityMap) && (fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic) && (fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages); # endif } bool operator!=(PhysicalDeviceFragmentDensityMapFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT) == sizeof(VkPhysicalDeviceFragmentDensityMapFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; }; struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentDensityMapOffset(fragmentDensityMapOffset_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=(PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &operator=(VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; 
} VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setFragmentDensityMapOffset(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_) VULKAN_HPP_NOEXCEPT { fragmentDensityMapOffset = fragmentDensityMapOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentDensityMapOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &) const = default; #else bool operator==(PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentDensityMapOffset == rhs.fragmentDensityMapOffset); # endif } bool operator!=(PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM) == sizeof(VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; }; struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM { using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentDensityOffsetGranularity(fragmentDensityOffsetGranularity_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=(PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &operator=(VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &rhs) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentDensityOffsetGranularity); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &) const = default; #else bool operator==(PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity); # endif } bool operator!=(PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; void *pNext = {}; VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM) == sizeof(VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; }; struct PhysicalDeviceFragmentDensityMapPropertiesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minFragmentDensityTexelSize(minFragmentDensityTexelSize_), maxFragmentDensityTexelSize(maxFragmentDensityTexelSize_), fragmentDensityInvocations(fragmentDensityInvocations_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(PhysicalDeviceFragmentDensityMapPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapPropertiesEXT(VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapPropertiesEXT &operator=(PhysicalDeviceFragmentDensityMapPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapPropertiesEXT &operator=(VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator 
VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentDensityMapPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceFragmentDensityMapPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize) && (maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize) && (fragmentDensityInvocations == rhs.fragmentDensityInvocations); # endif } bool operator!=(PhysicalDeviceFragmentDensityMapPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT) == sizeof(VkPhysicalDeviceFragmentDensityMapPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; }; struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV { using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentShaderBarycentric(fragmentShaderBarycentric_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(PhysicalDeviceFragmentShaderBarycentricFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShaderBarycentricFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=(PhysicalDeviceFragmentShaderBarycentricFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderBarycentricFeaturesNV &operator=(VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) 
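    // Note (illustrative only, not part of the generated header): the setters below return *this,
    // so PhysicalDeviceFragmentShaderBarycentricFeaturesNV can be configured fluently before being
    // chained into a DeviceCreateInfo::pNext when the feature is enabled at device creation. The
    // deviceCreateInfo object is assumed to exist elsewhere:
    //
    //   auto barycentricFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV{}.setFragmentShaderBarycentric( VK_TRUE );
    //   deviceCreateInfo.setPNext( &barycentricFeatures );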
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_) VULKAN_HPP_NOEXCEPT { fragmentShaderBarycentric = fragmentShaderBarycentric_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentShaderBarycentric); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShaderBarycentricFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceFragmentShaderBarycentricFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentShaderBarycentric == rhs.fragmentShaderBarycentric); # endif } bool operator!=(PhysicalDeviceFragmentShaderBarycentricFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV) == sizeof(VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShaderBarycentricFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV; }; struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT { using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentShaderSampleInterlock(fragmentShaderSampleInterlock_), fragmentShaderPixelInterlock(fragmentShaderPixelInterlock_), fragmentShaderShadingRateInterlock(fragmentShaderShadingRateInterlock_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(PhysicalDeviceFragmentShaderInterlockFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShaderInterlockFeaturesEXT(*reinterpret_cast(&rhs)) { 
} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShaderInterlockFeaturesEXT &operator=(PhysicalDeviceFragmentShaderInterlockFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderInterlockFeaturesEXT &operator=(VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_) VULKAN_HPP_NOEXCEPT { fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_) VULKAN_HPP_NOEXCEPT { fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_) VULKAN_HPP_NOEXCEPT { fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShaderInterlockFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceFragmentShaderInterlockFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock) && (fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock) && (fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock); # endif } bool operator!=(PhysicalDeviceFragmentShaderInterlockFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT) == sizeof(VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not 
nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; }; struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV { using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentShadingRateEnums(fragmentShadingRateEnums_), supersampleFragmentShadingRates(supersampleFragmentShadingRates_), noInvocationFragmentShadingRates(noInvocationFragmentShadingRates_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &operator=(PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &operator=(VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setFragmentShadingRateEnums(VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_) VULKAN_HPP_NOEXCEPT { fragmentShadingRateEnums = fragmentShadingRateEnums_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates(VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_) VULKAN_HPP_NOEXCEPT { supersampleFragmentShadingRates = supersampleFragmentShadingRates_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates(VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_) VULKAN_HPP_NOEXCEPT { noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &) const = default; #else bool 
operator==(PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentShadingRateEnums == rhs.fragmentShadingRateEnums) && (supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates) && (noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates); # endif } bool operator!=(PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV) == sizeof(VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; }; struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV { using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxFragmentShadingRateInvocationCount(maxFragmentShadingRateInvocationCount_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=(PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &operator=(VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setMaxFragmentShadingRateInvocationCount(VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_) VULKAN_HPP_NOEXCEPT { maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit 
operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxFragmentShadingRateInvocationCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount); # endif } bool operator!=(PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV) == sizeof(VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; }; struct PhysicalDeviceFragmentShadingRateFeaturesKHR { using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pipelineFragmentShadingRate(pipelineFragmentShadingRate_), primitiveFragmentShadingRate(primitiveFragmentShadingRate_), attachmentFragmentShadingRate(attachmentFragmentShadingRate_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(PhysicalDeviceFragmentShadingRateFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateFeaturesKHR(VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateFeaturesKHR &operator=(PhysicalDeviceFragmentShadingRateFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateFeaturesKHR &operator=(VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_) VULKAN_HPP_NOEXCEPT { pipelineFragmentShadingRate = pipelineFragmentShadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate(VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_) VULKAN_HPP_NOEXCEPT { primitiveFragmentShadingRate = primitiveFragmentShadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate(VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_) VULKAN_HPP_NOEXCEPT { attachmentFragmentShadingRate = attachmentFragmentShadingRate_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShadingRateFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceFragmentShadingRateFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate) && (primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate) && (attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate); # endif } bool operator!=(PhysicalDeviceFragmentShadingRateFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR) == sizeof(VkPhysicalDeviceFragmentShadingRateFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; }; struct PhysicalDeviceFragmentShadingRateKHR { using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceFragmentShadingRateKHR(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), sampleCounts(sampleCounts_), fragmentSize(fragmentSize_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR(PhysicalDeviceFragmentShadingRateKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateKHR(VkPhysicalDeviceFragmentShadingRateKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateKHR &operator=(PhysicalDeviceFragmentShadingRateKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateKHR &operator=(VkPhysicalDeviceFragmentShadingRateKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, sampleCounts, fragmentSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShadingRateKHR const &) const = default; #else bool operator==(PhysicalDeviceFragmentShadingRateKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (sampleCounts == rhs.sampleCounts) && (fragmentSize == rhs.fragmentSize); # endif } bool operator!=(PhysicalDeviceFragmentShadingRateKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR) == sizeof(VkPhysicalDeviceFragmentShadingRateKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShadingRateKHR; }; struct PhysicalDeviceFragmentShadingRatePropertiesKHR { using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
fragmentShadingRateNonTrivialCombinerOps_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minFragmentShadingRateAttachmentTexelSize(minFragmentShadingRateAttachmentTexelSize_), maxFragmentShadingRateAttachmentTexelSize(maxFragmentShadingRateAttachmentTexelSize_), maxFragmentShadingRateAttachmentTexelSizeAspectRatio(maxFragmentShadingRateAttachmentTexelSizeAspectRatio_), primitiveFragmentShadingRateWithMultipleViewports(primitiveFragmentShadingRateWithMultipleViewports_), layeredShadingRateAttachments(layeredShadingRateAttachments_), fragmentShadingRateNonTrivialCombinerOps(fragmentShadingRateNonTrivialCombinerOps_), maxFragmentSize(maxFragmentSize_), maxFragmentSizeAspectRatio(maxFragmentSizeAspectRatio_), maxFragmentShadingRateCoverageSamples(maxFragmentShadingRateCoverageSamples_), maxFragmentShadingRateRasterizationSamples(maxFragmentShadingRateRasterizationSamples_), fragmentShadingRateWithShaderDepthStencilWrites(fragmentShadingRateWithShaderDepthStencilWrites_), fragmentShadingRateWithSampleMask(fragmentShadingRateWithSampleMask_), fragmentShadingRateWithShaderSampleMask(fragmentShadingRateWithShaderSampleMask_), fragmentShadingRateWithConservativeRasterization(fragmentShadingRateWithConservativeRasterization_), fragmentShadingRateWithFragmentShaderInterlock(fragmentShadingRateWithFragmentShaderInterlock_), fragmentShadingRateWithCustomSampleLocations(fragmentShadingRateWithCustomSampleLocations_), fragmentShadingRateStrictMultiplyCombiner(fragmentShadingRateStrictMultiplyCombiner_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(PhysicalDeviceFragmentShadingRatePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRatePropertiesKHR(VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRatePropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRatePropertiesKHR &operator=(PhysicalDeviceFragmentShadingRatePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRatePropertiesKHR &operator=(VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, 
pNext, minFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSizeAspectRatio, primitiveFragmentShadingRateWithMultipleViewports, layeredShadingRateAttachments, fragmentShadingRateNonTrivialCombinerOps, maxFragmentSize, maxFragmentSizeAspectRatio, maxFragmentShadingRateCoverageSamples, maxFragmentShadingRateRasterizationSamples, fragmentShadingRateWithShaderDepthStencilWrites, fragmentShadingRateWithSampleMask, fragmentShadingRateWithShaderSampleMask, fragmentShadingRateWithConservativeRasterization, fragmentShadingRateWithFragmentShaderInterlock, fragmentShadingRateWithCustomSampleLocations, fragmentShadingRateStrictMultiplyCombiner); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceFragmentShadingRatePropertiesKHR const &) const = default; #else bool operator==(PhysicalDeviceFragmentShadingRatePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize) && (maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize) && (maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio) && (primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports) && (layeredShadingRateAttachments == rhs.layeredShadingRateAttachments) && (fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps) && (maxFragmentSize == rhs.maxFragmentSize) && (maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio) && (maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples) && (maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples) && (fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites) && (fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask) && (fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask) && (fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization) && (fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock) && (fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations) && (fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner); # endif } bool operator!=(PhysicalDeviceFragmentShadingRatePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; uint32_t maxFragmentSizeAspectRatio = {}; uint32_t 
maxFragmentShadingRateCoverageSamples = {}; VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR) == sizeof(VkPhysicalDeviceFragmentShadingRatePropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; }; struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR { using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), globalPriorityQuery(globalPriorityQuery_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR(PhysicalDeviceGlobalPriorityQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGlobalPriorityQueryFeaturesKHR(VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGlobalPriorityQueryFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceGlobalPriorityQueryFeaturesKHR &operator=(PhysicalDeviceGlobalPriorityQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGlobalPriorityQueryFeaturesKHR &operator=(VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setGlobalPriorityQuery(VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_) VULKAN_HPP_NOEXCEPT { globalPriorityQuery = globalPriorityQuery_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, globalPriorityQuery); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(PhysicalDeviceGlobalPriorityQueryFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceGlobalPriorityQueryFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (globalPriorityQuery == rhs.globalPriorityQuery); # endif } bool operator!=(PhysicalDeviceGlobalPriorityQueryFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR) == sizeof(VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; }; using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), graphicsPipelineLibrary(graphicsPipelineLibrary_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT(PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT(VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &operator=(PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &operator=(VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setGraphicsPipelineLibrary(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_) VULKAN_HPP_NOEXCEPT { graphicsPipelineLibrary = graphicsPipelineLibrary_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # 
else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, graphicsPipelineLibrary); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (graphicsPipelineLibrary == rhs.graphicsPipelineLibrary); # endif } bool operator!=(PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT) == sizeof(VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; }; struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), graphicsPipelineLibraryFastLinking(graphicsPipelineLibraryFastLinking_), graphicsPipelineLibraryIndependentInterpolationDecoration(graphicsPipelineLibraryIndependentInterpolationDecoration_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT(PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT(VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=(PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &operator=(VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryFastLinking(VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_) VULKAN_HPP_NOEXCEPT { graphicsPipelineLibraryFastLinking 
= graphicsPipelineLibraryFastLinking_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &setGraphicsPipelineLibraryIndependentInterpolationDecoration( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_) VULKAN_HPP_NOEXCEPT { graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking) && (graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration); # endif } bool operator!=(PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {}; VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT) == sizeof(VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; }; struct PhysicalDeviceGroupProperties { using NativeType = VkPhysicalDeviceGroupProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array const &physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), physicalDeviceCount(physicalDeviceCount_), physicalDevices(physicalDevices_), subsetAllocation(subsetAllocation_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(PhysicalDeviceGroupProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGroupProperties(VkPhysicalDeviceGroupProperties const &rhs) VULKAN_HPP_NOEXCEPT : 
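    // --------------------------------------------------------------------------------------------
    // Illustrative usage sketch (not part of the generated header): querying the graphics pipeline
    // library feature and property wrappers defined above through the pNext chain of
    // vk::PhysicalDeviceFeatures2 / vk::PhysicalDeviceProperties2. Assumes the default `vk` alias
    // for VULKAN_HPP_NAMESPACE, an already selected `vk::PhysicalDevice physicalDevice`, and that
    // the enhanced-mode StructureChain helpers are available.
    //
    //   auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
    //                                                   vk::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>();
    //   bool hasGpl = featureChain.get<vk::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>().graphicsPipelineLibrary != 0;
    //
    //   auto propertyChain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
    //                                                      vk::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>();
    //   bool fastLink = propertyChain.get<vk::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>().graphicsPipelineLibraryFastLinking != 0;
    //
    // The sType members are pre-initialized by the wrappers, so only the pNext links have to be set
    // up, which the StructureChain helper does automatically.
    // --------------------------------------------------------------------------------------------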
PhysicalDeviceGroupProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceGroupProperties &operator=(PhysicalDeviceGroupProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGroupProperties &operator=(VkPhysicalDeviceGroupProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceGroupProperties const &) const = default; #else bool operator==(PhysicalDeviceGroupProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (physicalDeviceCount == rhs.physicalDeviceCount) && (physicalDevices == rhs.physicalDevices) && (subsetAllocation == rhs.subsetAllocation); # endif } bool operator!=(PhysicalDeviceGroupProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; void *pNext = {}; uint32_t physicalDeviceCount = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties) == sizeof(VkPhysicalDeviceGroupProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceGroupProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceGroupProperties; }; using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; struct PhysicalDeviceHostQueryResetFeatures { using NativeType = VkPhysicalDeviceHostQueryResetFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), hostQueryReset(hostQueryReset_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(PhysicalDeviceHostQueryResetFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHostQueryResetFeatures(VkPhysicalDeviceHostQueryResetFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceHostQueryResetFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceHostQueryResetFeatures &operator=(PhysicalDeviceHostQueryResetFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHostQueryResetFeatures &operator=(VkPhysicalDeviceHostQueryResetFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures &setHostQueryReset(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_) VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, hostQueryReset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceHostQueryResetFeatures const &) const = default; #else bool operator==(PhysicalDeviceHostQueryResetFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (hostQueryReset == rhs.hostQueryReset); # endif } bool operator!=(PhysicalDeviceHostQueryResetFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures) == sizeof(VkPhysicalDeviceHostQueryResetFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceHostQueryResetFeatures; }; using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; struct PhysicalDeviceIDProperties { using NativeType = VkPhysicalDeviceIDProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array const &deviceUUID_ = {}, std::array const &driverUUID_ = {}, std::array const &deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceUUID(deviceUUID_), driverUUID(driverUUID_), deviceLUID(deviceLUID_), deviceNodeMask(deviceNodeMask_), deviceLUIDValid(deviceLUIDValid_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(PhysicalDeviceIDProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIDProperties(VkPhysicalDeviceIDProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceIDProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceIDProperties &operator=(PhysicalDeviceIDProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIDProperties &operator=(VkPhysicalDeviceIDProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceIDProperties const &() const 
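    // Illustrative sketch (not part of the generated header): vk::PhysicalDeviceGroupProperties,
    // defined above, is the element type returned when enumerating device groups on a Vulkan 1.1
    // instance. Assumes the default `vk` alias and an existing `vk::Instance instance`.
    //
    //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
    //   for (auto const &group : groups)
    //   {
    //     // physicalDevices[0..physicalDeviceCount) are the members of this group;
    //     // subsetAllocation tells whether allocations may target a subset of them.
    //     for (uint32_t i = 0; i < group.physicalDeviceCount; ++i)
    //     {
    //       vk::PhysicalDevice member = group.physicalDevices[i];
    //     }
    //   }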
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceIDProperties const &) const = default; #else bool operator==(PhysicalDeviceIDProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceUUID == rhs.deviceUUID) && (driverUUID == rhs.driverUUID) && (deviceLUID == rhs.deviceLUID) && (deviceNodeMask == rhs.deviceNodeMask) && (deviceLUIDValid == rhs.deviceLUIDValid); # endif } bool operator!=(PhysicalDeviceIDProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; uint32_t deviceNodeMask = {}; VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties) == sizeof(VkPhysicalDeviceIDProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceIDProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceIDProperties; }; using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; struct PhysicalDeviceImage2DViewOf3DFeaturesEXT { using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), image2DViewOf3D(image2DViewOf3D_), sampler2DViewOf3D(sampler2DViewOf3D_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT(PhysicalDeviceImage2DViewOf3DFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImage2DViewOf3DFeaturesEXT(VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImage2DViewOf3DFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImage2DViewOf3DFeaturesEXT &operator=(PhysicalDeviceImage2DViewOf3DFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImage2DViewOf3DFeaturesEXT &operator=(VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 
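    // Illustrative sketch (not part of the generated header): the UUID / LUID members of
    // vk::PhysicalDeviceIDProperties (defined above) are typically used to match a Vulkan physical
    // device against a device handle from another API before sharing external memory or semaphores.
    // Assumes the `vk` alias and a `vk::PhysicalDevice physicalDevice`.
    //
    //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
    //                                              vk::PhysicalDeviceIDProperties>();
    //   auto const &idProps = chain.get<vk::PhysicalDeviceIDProperties>();
    //   if (idProps.deviceLUIDValid)
    //   {
    //     // idProps.deviceLUID (VK_LUID_SIZE bytes) can be compared against e.g. a DXGI adapter LUID.
    //   }
    //   // idProps.deviceUUID / driverUUID identify the device and driver build across processes.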
PhysicalDeviceImage2DViewOf3DFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT &setImage2DViewOf3D(VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_) VULKAN_HPP_NOEXCEPT { image2DViewOf3D = image2DViewOf3D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setSampler2DViewOf3D(VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_) VULKAN_HPP_NOEXCEPT { sampler2DViewOf3D = sampler2DViewOf3D_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, image2DViewOf3D, sampler2DViewOf3D); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImage2DViewOf3DFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceImage2DViewOf3DFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (image2DViewOf3D == rhs.image2DViewOf3D) && (sampler2DViewOf3D == rhs.sampler2DViewOf3D); # endif } bool operator!=(PhysicalDeviceImage2DViewOf3DFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {}; VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT) == sizeof(VkPhysicalDeviceImage2DViewOf3DFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; }; struct PhysicalDeviceImageDrmFormatModifierInfoEXT { using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t *pQueueFamilyIndices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), drmFormatModifier(drmFormatModifier_), sharingMode(sharingMode_), queueFamilyIndexCount(queueFamilyIndexCount_), pQueueFamilyIndices(pQueueFamilyIndices_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(PhysicalDeviceImageDrmFormatModifierInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageDrmFormatModifierInfoEXT(VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceImageDrmFormatModifierInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_, const void *pNext_ = nullptr) : pNext(pNext_) , drmFormatModifier(drmFormatModifier_) , sharingMode(sharingMode_) , queueFamilyIndexCount(static_cast(queueFamilyIndices_.size())) , pQueueFamilyIndices(queueFamilyIndices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImageDrmFormatModifierInfoEXT &operator=(PhysicalDeviceImageDrmFormatModifierInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageDrmFormatModifierInfoEXT &operator=(VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &setDrmFormatModifier(uint64_t drmFormatModifier_) VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &setSharingMode(VULKAN_HPP_NAMESPACE::SharingMode sharingMode_) VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &setQueueFamilyIndexCount(uint32_t queueFamilyIndexCount_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &setPQueueFamilyIndices(const uint32_t *pQueueFamilyIndices_) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast(queueFamilyIndices_.size()); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImageDrmFormatModifierInfoEXT const &) const = default; #else bool operator==(PhysicalDeviceImageDrmFormatModifierInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (drmFormatModifier == rhs.drmFormatModifier) && (sharingMode == rhs.sharingMode) && (queueFamilyIndexCount == rhs.queueFamilyIndexCount) && (pQueueFamilyIndices == rhs.pQueueFamilyIndices); # endif } bool 
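    // Illustrative sketch (not part of the generated header): the
    // vk::PhysicalDeviceImageDrmFormatModifierInfoEXT wrapper being defined here is chained into a
    // vk::PhysicalDeviceImageFormatInfo2 (defined below) when tiling is
    // vk::ImageTiling::eDrmFormatModifierEXT, to ask whether a specific modifier is usable.
    // Assumes the `vk` alias, a `vk::PhysicalDevice physicalDevice`, and a `uint64_t modifier`
    // previously obtained from vk::DrmFormatModifierPropertiesListEXT.
    //
    //   vk::StructureChain<vk::PhysicalDeviceImageFormatInfo2, vk::PhysicalDeviceImageDrmFormatModifierInfoEXT> info{
    //     {vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eDrmFormatModifierEXT,
    //      vk::ImageUsageFlagBits::eSampled, {}},
    //     {modifier, vk::SharingMode::eExclusive, 0, nullptr}
    //   };
    //   auto result = physicalDevice.getImageFormatProperties2(info.get<vk::PhysicalDeviceImageFormatInfo2>());
    //
    // The ArrayProxyNoTemporaries constructor above is a convenience for the eConcurrent case: it
    // fills queueFamilyIndexCount and pQueueFamilyIndices from a single container argument.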
operator!=(PhysicalDeviceImageDrmFormatModifierInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; const void *pNext = {}; uint64_t drmFormatModifier = {}; VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t *pQueueFamilyIndices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT) == sizeof(VkPhysicalDeviceImageDrmFormatModifierInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; }; struct PhysicalDeviceImageFormatInfo2 { using NativeType = VkPhysicalDeviceImageFormatInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), format(format_), type(type_), tiling(tiling_), usage(usage_), flags(flags_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(PhysicalDeviceImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageFormatInfo2(VkPhysicalDeviceImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageFormatInfo2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImageFormatInfo2 &operator=(PhysicalDeviceImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageFormatInfo2 &operator=(VkPhysicalDeviceImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &setType(VULKAN_HPP_NAMESPACE::ImageType type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &setTiling(VULKAN_HPP_NAMESPACE::ImageTiling tiling_) VULKAN_HPP_NOEXCEPT { tiling = tiling_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &setUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &setFlags(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImageFormatInfo2 
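    // Illustrative sketch (not part of the generated header): a basic capability query with the
    // vk::PhysicalDeviceImageFormatInfo2 wrapper that is being defined here. Assumes the `vk` alias
    // and a `vk::PhysicalDevice physicalDevice`; error handling is omitted.
    //
    //   vk::PhysicalDeviceImageFormatInfo2 imageInfo{vk::Format::eR8G8B8A8Unorm,
    //                                                vk::ImageType::e2D,
    //                                                vk::ImageTiling::eOptimal,
    //                                                vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled};
    //   vk::ImageFormatProperties2 imageProps = physicalDevice.getImageFormatProperties2(imageInfo);
    //   // imageProps.imageFormatProperties.maxExtent / maxMipLevels / sampleCounts describe what the
    //   // implementation supports for this combination; an unsupported combination surfaces as
    //   // eErrorFormatNotSupported (thrown or returned, depending on the error-handling configuration).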
const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, format, type, tiling, usage, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImageFormatInfo2 const &) const = default; #else bool operator==(PhysicalDeviceImageFormatInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (format == rhs.format) && (type == rhs.type) && (tiling == rhs.tiling) && (usage == rhs.usage) && (flags == rhs.flags); # endif } bool operator!=(PhysicalDeviceImageFormatInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2) == sizeof(VkPhysicalDeviceImageFormatInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImageFormatInfo2; }; using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; struct PhysicalDeviceImageRobustnessFeatures { using NativeType = VkPhysicalDeviceImageRobustnessFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), robustImageAccess(robustImageAccess_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures(PhysicalDeviceImageRobustnessFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageRobustnessFeatures(VkPhysicalDeviceImageRobustnessFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageRobustnessFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImageRobustnessFeatures &operator=(PhysicalDeviceImageRobustnessFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageRobustnessFeatures &operator=(VkPhysicalDeviceImageRobustnessFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures &setRobustImageAccess(VULKAN_HPP_NAMESPACE::Bool32 
robustImageAccess_) VULKAN_HPP_NOEXCEPT { robustImageAccess = robustImageAccess_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, robustImageAccess); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImageRobustnessFeatures const &) const = default; #else bool operator==(PhysicalDeviceImageRobustnessFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (robustImageAccess == rhs.robustImageAccess); # endif } bool operator!=(PhysicalDeviceImageRobustnessFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures) == sizeof(VkPhysicalDeviceImageRobustnessFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImageRobustnessFeatures; }; using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; struct PhysicalDeviceImageViewImageFormatInfoEXT { using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageViewType(imageViewType_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(PhysicalDeviceImageViewImageFormatInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewImageFormatInfoEXT(VkPhysicalDeviceImageViewImageFormatInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageViewImageFormatInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImageViewImageFormatInfoEXT &operator=(PhysicalDeviceImageViewImageFormatInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewImageFormatInfoEXT &operator=(VkPhysicalDeviceImageViewImageFormatInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_) VULKAN_HPP_NOEXCEPT 
{ imageViewType = imageViewType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageViewType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImageViewImageFormatInfoEXT const &) const = default; #else bool operator==(PhysicalDeviceImageViewImageFormatInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageViewType == rhs.imageViewType); # endif } bool operator!=(PhysicalDeviceImageViewImageFormatInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT) == sizeof(VkPhysicalDeviceImageViewImageFormatInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImageViewImageFormatInfoEXT; }; struct PhysicalDeviceImageViewMinLodFeaturesEXT { using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minLod(minLod_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT(PhysicalDeviceImageViewMinLodFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewMinLodFeaturesEXT(VkPhysicalDeviceImageViewMinLodFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageViewMinLodFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImageViewMinLodFeaturesEXT &operator=(PhysicalDeviceImageViewMinLodFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewMinLodFeaturesEXT &operator=(VkPhysicalDeviceImageViewMinLodFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT &setMinLod(VULKAN_HPP_NAMESPACE::Bool32 minLod_) VULKAN_HPP_NOEXCEPT { minLod = minLod_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const 
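    // Illustrative sketch (not part of the generated header): vk::PhysicalDeviceImageViewImageFormatInfoEXT
    // (defined above, VK_EXT_filter_cubic) extends vk::PhysicalDeviceImageFormatInfo2 to ask whether
    // cubic filtering is supported for a given image view type. Assumes the `vk` alias and a
    // `vk::PhysicalDevice physicalDevice`.
    //
    //   vk::StructureChain<vk::PhysicalDeviceImageFormatInfo2, vk::PhysicalDeviceImageViewImageFormatInfoEXT> query{
    //     {vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled, {}},
    //     {vk::ImageViewType::e2D}
    //   };
    //   auto props = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
    //                                                         vk::FilterCubicImageViewImageFormatPropertiesEXT>(
    //     query.get<vk::PhysicalDeviceImageFormatInfo2>());
    //   bool cubic = props.get<vk::FilterCubicImageViewImageFormatPropertiesEXT>().filterCubic != 0;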
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minLod); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImageViewMinLodFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceImageViewMinLodFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minLod == rhs.minLod); # endif } bool operator!=(PhysicalDeviceImageViewMinLodFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 minLod = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT) == sizeof(VkPhysicalDeviceImageViewMinLodFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; }; struct PhysicalDeviceImagelessFramebufferFeatures { using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imagelessFramebuffer(imagelessFramebuffer_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(PhysicalDeviceImagelessFramebufferFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImagelessFramebufferFeatures(VkPhysicalDeviceImagelessFramebufferFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImagelessFramebufferFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceImagelessFramebufferFeatures &operator=(PhysicalDeviceImagelessFramebufferFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImagelessFramebufferFeatures &operator=(VkPhysicalDeviceImagelessFramebufferFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_) VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceImagelessFramebufferFeatures &() 
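    // Illustrative sketch (not part of the generated header): when the minLod feature above is
    // enabled, a vk::ImageViewMinLodCreateInfoEXT can be chained into vk::ImageViewCreateInfo to
    // clamp the view to a minimum LOD. Assumes the `vk` alias, a `vk::Device device` and an existing
    // `vk::Image image`.
    //
    //   vk::StructureChain<vk::ImageViewCreateInfo, vk::ImageViewMinLodCreateInfoEXT> viewInfo{
    //     {{}, image, vk::ImageViewType::e2D, vk::Format::eR8G8B8A8Unorm, {},
    //      {vk::ImageAspectFlagBits::eColor, 0, VK_REMAINING_MIP_LEVELS, 0, 1}},
    //     {2.0f}  // never sample below LOD 2
    //   };
    //   vk::ImageView view = device.createImageView(viewInfo.get<vk::ImageViewCreateInfo>());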
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imagelessFramebuffer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceImagelessFramebufferFeatures const &) const = default; #else bool operator==(PhysicalDeviceImagelessFramebufferFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imagelessFramebuffer == rhs.imagelessFramebuffer); # endif } bool operator!=(PhysicalDeviceImagelessFramebufferFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures) == sizeof(VkPhysicalDeviceImagelessFramebufferFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceImagelessFramebufferFeatures; }; using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; struct PhysicalDeviceIndexTypeUint8FeaturesEXT { using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), indexTypeUint8(indexTypeUint8_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(PhysicalDeviceIndexTypeUint8FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIndexTypeUint8FeaturesEXT(VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceIndexTypeUint8FeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceIndexTypeUint8FeaturesEXT &operator=(PhysicalDeviceIndexTypeUint8FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIndexTypeUint8FeaturesEXT &operator=(VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT &setIndexTypeUint8(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_) VULKAN_HPP_NOEXCEPT { indexTypeUint8 = indexTypeUint8_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } 
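    // Illustrative sketch (not part of the generated header): the Bool32 "Features" wrappers in this
    // header, such as vk::PhysicalDeviceImagelessFramebufferFeatures above, are enabled by chaining
    // them into vk::DeviceCreateInfo through their pNext members. Assumes the `vk` alias, a selected
    // `vk::PhysicalDevice physicalDevice` and a prepared container `queueCreateInfos` of
    // vk::DeviceQueueCreateInfo.
    //
    //   vk::StructureChain<vk::DeviceCreateInfo,
    //                      vk::PhysicalDeviceImagelessFramebufferFeatures,
    //                      vk::PhysicalDeviceIndexTypeUint8FeaturesEXT> createChain{
    //     vk::DeviceCreateInfo{{}, queueCreateInfos},
    //     vk::PhysicalDeviceImagelessFramebufferFeatures{VK_TRUE},
    //     vk::PhysicalDeviceIndexTypeUint8FeaturesEXT{VK_TRUE}
    //   };
    //   vk::Device device = physicalDevice.createDevice(createChain.get<vk::DeviceCreateInfo>());
    //
    // The StructureChain helper links the pNext members and keeps the pre-set sType values intact;
    // the same chaining works with vk::PhysicalDeviceFeatures2 when querying support beforehand.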
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, indexTypeUint8);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceIndexTypeUint8FeaturesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceIndexTypeUint8FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (indexTypeUint8 == rhs.indexTypeUint8);
# endif
    }

    bool operator!=(PhysicalDeviceIndexTypeUint8FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT) == sizeof(VkPhysicalDeviceIndexTypeUint8FeaturesEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
                           "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!");

  template<>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
  {
    using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
  };

  struct PhysicalDeviceInheritedViewportScissorFeaturesNV
  {
    using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , inheritedViewportScissor2D(inheritedViewportScissor2D_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(PhysicalDeviceInheritedViewportScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInheritedViewportScissorFeaturesNV(VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInheritedViewportScissorFeaturesNV(*reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceInheritedViewportScissorFeaturesNV &operator=(PhysicalDeviceInheritedViewportScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInheritedViewportScissorFeaturesNV &operator=(VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
      setInheritedViewportScissor2D(VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_) VULKAN_HPP_NOEXCEPT
    {
      inheritedViewportScissor2D = inheritedViewportScissor2D_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>(this);
    }

    explicit operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, inheritedViewportScissor2D); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceInheritedViewportScissorFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceInheritedViewportScissorFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (inheritedViewportScissor2D == rhs.inheritedViewportScissor2D); # endif } bool operator!=(PhysicalDeviceInheritedViewportScissorFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV) == sizeof(VkPhysicalDeviceInheritedViewportScissorFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; }; struct PhysicalDeviceInlineUniformBlockFeatures { using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), inlineUniformBlock(inlineUniformBlock_), descriptorBindingInlineUniformBlockUpdateAfterBind(descriptorBindingInlineUniformBlockUpdateAfterBind_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures(PhysicalDeviceInlineUniformBlockFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockFeatures(VkPhysicalDeviceInlineUniformBlockFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInlineUniformBlockFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceInlineUniformBlockFeatures &operator=(PhysicalDeviceInlineUniformBlockFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockFeatures &operator=(VkPhysicalDeviceInlineUniformBlockFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setInlineUniformBlock(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_) VULKAN_HPP_NOEXCEPT { inlineUniformBlock = inlineUniformBlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures 
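    // Illustrative sketch (not part of the generated header): when the inlineUniformBlock feature
    // being defined here is enabled, descriptor data can be embedded directly in a descriptor set
    // through vk::WriteDescriptorSetInlineUniformBlock. Assumes the `vk` alias, a `vk::Device device`,
    // an allocated `vk::DescriptorSet set` whose layout uses vk::DescriptorType::eInlineUniformBlock,
    // and a 16-byte `struct Params { float data[4]; } params`.
    //
    //   vk::WriteDescriptorSetInlineUniformBlock inlineData{16, &params};
    //   vk::WriteDescriptorSet write{set, /*binding*/ 0, /*arrayElement*/ 0, /*descriptorCount = byte size*/ 16,
    //                                vk::DescriptorType::eInlineUniformBlock, nullptr, nullptr, nullptr, &inlineData};
    //   device.updateDescriptorSets(write, nullptr);
    //
    // descriptorCount holds the byte size for this descriptor type, and the limits in
    // vk::PhysicalDeviceInlineUniformBlockProperties (defined below) bound how much data may be used.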
&setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceInlineUniformBlockFeatures const &) const = default; #else bool operator==(PhysicalDeviceInlineUniformBlockFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (inlineUniformBlock == rhs.inlineUniformBlock) && (descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind); # endif } bool operator!=(PhysicalDeviceInlineUniformBlockFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures) == sizeof(VkPhysicalDeviceInlineUniformBlockFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceInlineUniformBlockFeatures; }; using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; struct PhysicalDeviceInlineUniformBlockProperties { using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxInlineUniformBlockSize(maxInlineUniformBlockSize_), maxPerStageDescriptorInlineUniformBlocks(maxPerStageDescriptorInlineUniformBlocks_), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_), maxDescriptorSetInlineUniformBlocks(maxDescriptorSetInlineUniformBlocks_), 
maxDescriptorSetUpdateAfterBindInlineUniformBlocks(maxDescriptorSetUpdateAfterBindInlineUniformBlocks_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties(PhysicalDeviceInlineUniformBlockProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockProperties(VkPhysicalDeviceInlineUniformBlockProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInlineUniformBlockProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceInlineUniformBlockProperties &operator=(PhysicalDeviceInlineUniformBlockProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockProperties &operator=(VkPhysicalDeviceInlineUniformBlockProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceInlineUniformBlockProperties const &) const = default; #else bool operator==(PhysicalDeviceInlineUniformBlockProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize) && (maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks) && (maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) && (maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks) && (maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks); # endif } bool operator!=(PhysicalDeviceInlineUniformBlockProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; void *pNext = {}; uint32_t maxInlineUniformBlockSize = {}; uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; uint32_t maxDescriptorSetInlineUniformBlocks = {}; uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties) == sizeof(VkPhysicalDeviceInlineUniformBlockProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceInlineUniformBlockProperties; }; using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; struct 
PhysicalDeviceInvocationMaskFeaturesHUAWEI
  {
    using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , invocationMask(invocationMask_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI(PhysicalDeviceInvocationMaskFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInvocationMaskFeaturesHUAWEI(VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInvocationMaskFeaturesHUAWEI(*reinterpret_cast<PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceInvocationMaskFeaturesHUAWEI &operator=(PhysicalDeviceInvocationMaskFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInvocationMaskFeaturesHUAWEI &operator=(VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI &setInvocationMask(VULKAN_HPP_NAMESPACE::Bool32 invocationMask_) VULKAN_HPP_NOEXCEPT
    {
      invocationMask = invocationMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>(this);
    }

    explicit operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, invocationMask);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceInvocationMaskFeaturesHUAWEI const &) const = default;
#else
    bool operator==(PhysicalDeviceInvocationMaskFeaturesHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (invocationMask == rhs.invocationMask);
# endif
    }

    bool operator!=(PhysicalDeviceInvocationMaskFeaturesHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI) == sizeof(VkPhysicalDeviceInvocationMaskFeaturesHUAWEI),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
                           "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!");

  template<>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI>
  {
    using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
  };

  struct PhysicalDeviceLimits
  {
    using NativeType = VkPhysicalDeviceLimits;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
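    // Illustrative sketch (not part of the generated header): vk::PhysicalDeviceLimits is not filled
    // in by applications; it is read back as part of vk::PhysicalDeviceProperties. Assumes the `vk`
    // alias and a `vk::PhysicalDevice physicalDevice`.
    //
    //   vk::PhysicalDeviceLimits const limits = physicalDevice.getProperties().limits;
    //   uint32_t maxImageDimension2D = limits.maxImageDimension2D;
    //   vk::DeviceSize uboAlignment  = limits.minUniformBufferOffsetAlignment;
    //
    // Values such as minUniformBufferOffsetAlignment and nonCoherentAtomSize are commonly used to
    // round buffer offsets and flush ranges up to a legal alignment.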
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array const &maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array const &maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array const &maxViewportDimensions_ = {}, std::array const &viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t 
maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array const &pointSizeRange_ = {}, std::array const &lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT : maxImageDimension1D(maxImageDimension1D_), maxImageDimension2D(maxImageDimension2D_), maxImageDimension3D(maxImageDimension3D_), maxImageDimensionCube(maxImageDimensionCube_), maxImageArrayLayers(maxImageArrayLayers_), maxTexelBufferElements(maxTexelBufferElements_), maxUniformBufferRange(maxUniformBufferRange_), maxStorageBufferRange(maxStorageBufferRange_), maxPushConstantsSize(maxPushConstantsSize_), maxMemoryAllocationCount(maxMemoryAllocationCount_), maxSamplerAllocationCount(maxSamplerAllocationCount_), bufferImageGranularity(bufferImageGranularity_), sparseAddressSpaceSize(sparseAddressSpaceSize_), maxBoundDescriptorSets(maxBoundDescriptorSets_), maxPerStageDescriptorSamplers(maxPerStageDescriptorSamplers_), maxPerStageDescriptorUniformBuffers(maxPerStageDescriptorUniformBuffers_), maxPerStageDescriptorStorageBuffers(maxPerStageDescriptorStorageBuffers_), maxPerStageDescriptorSampledImages(maxPerStageDescriptorSampledImages_), maxPerStageDescriptorStorageImages(maxPerStageDescriptorStorageImages_), maxPerStageDescriptorInputAttachments(maxPerStageDescriptorInputAttachments_), maxPerStageResources(maxPerStageResources_), maxDescriptorSetSamplers(maxDescriptorSetSamplers_), maxDescriptorSetUniformBuffers(maxDescriptorSetUniformBuffers_), maxDescriptorSetUniformBuffersDynamic(maxDescriptorSetUniformBuffersDynamic_), maxDescriptorSetStorageBuffers(maxDescriptorSetStorageBuffers_), maxDescriptorSetStorageBuffersDynamic(maxDescriptorSetStorageBuffersDynamic_), maxDescriptorSetSampledImages(maxDescriptorSetSampledImages_), maxDescriptorSetStorageImages(maxDescriptorSetStorageImages_), maxDescriptorSetInputAttachments(maxDescriptorSetInputAttachments_), maxVertexInputAttributes(maxVertexInputAttributes_), maxVertexInputBindings(maxVertexInputBindings_), maxVertexInputAttributeOffset(maxVertexInputAttributeOffset_), maxVertexInputBindingStride(maxVertexInputBindingStride_), maxVertexOutputComponents(maxVertexOutputComponents_), 
maxTessellationGenerationLevel(maxTessellationGenerationLevel_), maxTessellationPatchSize(maxTessellationPatchSize_), maxTessellationControlPerVertexInputComponents(maxTessellationControlPerVertexInputComponents_), maxTessellationControlPerVertexOutputComponents(maxTessellationControlPerVertexOutputComponents_), maxTessellationControlPerPatchOutputComponents(maxTessellationControlPerPatchOutputComponents_), maxTessellationControlTotalOutputComponents(maxTessellationControlTotalOutputComponents_), maxTessellationEvaluationInputComponents(maxTessellationEvaluationInputComponents_), maxTessellationEvaluationOutputComponents(maxTessellationEvaluationOutputComponents_), maxGeometryShaderInvocations(maxGeometryShaderInvocations_), maxGeometryInputComponents(maxGeometryInputComponents_), maxGeometryOutputComponents(maxGeometryOutputComponents_), maxGeometryOutputVertices(maxGeometryOutputVertices_), maxGeometryTotalOutputComponents(maxGeometryTotalOutputComponents_), maxFragmentInputComponents(maxFragmentInputComponents_), maxFragmentOutputAttachments(maxFragmentOutputAttachments_), maxFragmentDualSrcAttachments(maxFragmentDualSrcAttachments_), maxFragmentCombinedOutputResources(maxFragmentCombinedOutputResources_), maxComputeSharedMemorySize(maxComputeSharedMemorySize_), maxComputeWorkGroupCount(maxComputeWorkGroupCount_), maxComputeWorkGroupInvocations(maxComputeWorkGroupInvocations_), maxComputeWorkGroupSize(maxComputeWorkGroupSize_), subPixelPrecisionBits(subPixelPrecisionBits_), subTexelPrecisionBits(subTexelPrecisionBits_), mipmapPrecisionBits(mipmapPrecisionBits_), maxDrawIndexedIndexValue(maxDrawIndexedIndexValue_), maxDrawIndirectCount(maxDrawIndirectCount_), maxSamplerLodBias(maxSamplerLodBias_), maxSamplerAnisotropy(maxSamplerAnisotropy_), maxViewports(maxViewports_), maxViewportDimensions(maxViewportDimensions_), viewportBoundsRange(viewportBoundsRange_), viewportSubPixelBits(viewportSubPixelBits_), minMemoryMapAlignment(minMemoryMapAlignment_), minTexelBufferOffsetAlignment(minTexelBufferOffsetAlignment_), minUniformBufferOffsetAlignment(minUniformBufferOffsetAlignment_), minStorageBufferOffsetAlignment(minStorageBufferOffsetAlignment_), minTexelOffset(minTexelOffset_), maxTexelOffset(maxTexelOffset_), minTexelGatherOffset(minTexelGatherOffset_), maxTexelGatherOffset(maxTexelGatherOffset_), minInterpolationOffset(minInterpolationOffset_), maxInterpolationOffset(maxInterpolationOffset_), subPixelInterpolationOffsetBits(subPixelInterpolationOffsetBits_), maxFramebufferWidth(maxFramebufferWidth_), maxFramebufferHeight(maxFramebufferHeight_), maxFramebufferLayers(maxFramebufferLayers_), framebufferColorSampleCounts(framebufferColorSampleCounts_), framebufferDepthSampleCounts(framebufferDepthSampleCounts_), framebufferStencilSampleCounts(framebufferStencilSampleCounts_), framebufferNoAttachmentsSampleCounts(framebufferNoAttachmentsSampleCounts_), maxColorAttachments(maxColorAttachments_), sampledImageColorSampleCounts(sampledImageColorSampleCounts_), sampledImageIntegerSampleCounts(sampledImageIntegerSampleCounts_), sampledImageDepthSampleCounts(sampledImageDepthSampleCounts_), sampledImageStencilSampleCounts(sampledImageStencilSampleCounts_), storageImageSampleCounts(storageImageSampleCounts_), maxSampleMaskWords(maxSampleMaskWords_), timestampComputeAndGraphics(timestampComputeAndGraphics_), timestampPeriod(timestampPeriod_), maxClipDistances(maxClipDistances_), maxCullDistances(maxCullDistances_), maxCombinedClipAndCullDistances(maxCombinedClipAndCullDistances_), 
discreteQueuePriorities(discreteQueuePriorities_), pointSizeRange(pointSizeRange_), lineWidthRange(lineWidthRange_), pointSizeGranularity(pointSizeGranularity_), lineWidthGranularity(lineWidthGranularity_), strictLines(strictLines_), standardSampleLocations(standardSampleLocations_), optimalBufferCopyOffsetAlignment(optimalBufferCopyOffsetAlignment_), optimalBufferCopyRowPitchAlignment(optimalBufferCopyRowPitchAlignment_), nonCoherentAtomSize(nonCoherentAtomSize_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(PhysicalDeviceLimits const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLimits(VkPhysicalDeviceLimits const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLimits(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceLimits &operator=(PhysicalDeviceLimits const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLimits &operator=(VkPhysicalDeviceLimits const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, uint32_t const &, size_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, int32_t const &, uint32_t const &, int32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(maxImageDimension1D, maxImageDimension2D, maxImageDimension3D, maxImageDimensionCube, maxImageArrayLayers, maxTexelBufferElements, maxUniformBufferRange, maxStorageBufferRange, maxPushConstantsSize, maxMemoryAllocationCount, maxSamplerAllocationCount, bufferImageGranularity, sparseAddressSpaceSize, maxBoundDescriptorSets, maxPerStageDescriptorSamplers, maxPerStageDescriptorUniformBuffers, maxPerStageDescriptorStorageBuffers, maxPerStageDescriptorSampledImages, maxPerStageDescriptorStorageImages, maxPerStageDescriptorInputAttachments, maxPerStageResources, maxDescriptorSetSamplers, maxDescriptorSetUniformBuffers, 
maxDescriptorSetUniformBuffersDynamic, maxDescriptorSetStorageBuffers, maxDescriptorSetStorageBuffersDynamic, maxDescriptorSetSampledImages, maxDescriptorSetStorageImages, maxDescriptorSetInputAttachments, maxVertexInputAttributes, maxVertexInputBindings, maxVertexInputAttributeOffset, maxVertexInputBindingStride, maxVertexOutputComponents, maxTessellationGenerationLevel, maxTessellationPatchSize, maxTessellationControlPerVertexInputComponents, maxTessellationControlPerVertexOutputComponents, maxTessellationControlPerPatchOutputComponents, maxTessellationControlTotalOutputComponents, maxTessellationEvaluationInputComponents, maxTessellationEvaluationOutputComponents, maxGeometryShaderInvocations, maxGeometryInputComponents, maxGeometryOutputComponents, maxGeometryOutputVertices, maxGeometryTotalOutputComponents, maxFragmentInputComponents, maxFragmentOutputAttachments, maxFragmentDualSrcAttachments, maxFragmentCombinedOutputResources, maxComputeSharedMemorySize, maxComputeWorkGroupCount, maxComputeWorkGroupInvocations, maxComputeWorkGroupSize, subPixelPrecisionBits, subTexelPrecisionBits, mipmapPrecisionBits, maxDrawIndexedIndexValue, maxDrawIndirectCount, maxSamplerLodBias, maxSamplerAnisotropy, maxViewports, maxViewportDimensions, viewportBoundsRange, viewportSubPixelBits, minMemoryMapAlignment, minTexelBufferOffsetAlignment, minUniformBufferOffsetAlignment, minStorageBufferOffsetAlignment, minTexelOffset, maxTexelOffset, minTexelGatherOffset, maxTexelGatherOffset, minInterpolationOffset, maxInterpolationOffset, subPixelInterpolationOffsetBits, maxFramebufferWidth, maxFramebufferHeight, maxFramebufferLayers, framebufferColorSampleCounts, framebufferDepthSampleCounts, framebufferStencilSampleCounts, framebufferNoAttachmentsSampleCounts, maxColorAttachments, sampledImageColorSampleCounts, sampledImageIntegerSampleCounts, sampledImageDepthSampleCounts, sampledImageStencilSampleCounts, storageImageSampleCounts, maxSampleMaskWords, timestampComputeAndGraphics, timestampPeriod, maxClipDistances, maxCullDistances, maxCombinedClipAndCullDistances, discreteQueuePriorities, pointSizeRange, lineWidthRange, pointSizeGranularity, lineWidthGranularity, strictLines, standardSampleLocations, optimalBufferCopyOffsetAlignment, optimalBufferCopyRowPitchAlignment, nonCoherentAtomSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceLimits const &) const = default; #else bool operator==(PhysicalDeviceLimits const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (maxImageDimension1D == rhs.maxImageDimension1D) && (maxImageDimension2D == rhs.maxImageDimension2D) && (maxImageDimension3D == rhs.maxImageDimension3D) && (maxImageDimensionCube == rhs.maxImageDimensionCube) && (maxImageArrayLayers == rhs.maxImageArrayLayers) && (maxTexelBufferElements == rhs.maxTexelBufferElements) && (maxUniformBufferRange == rhs.maxUniformBufferRange) && (maxStorageBufferRange == rhs.maxStorageBufferRange) && (maxPushConstantsSize == rhs.maxPushConstantsSize) && (maxMemoryAllocationCount == rhs.maxMemoryAllocationCount) && (maxSamplerAllocationCount == rhs.maxSamplerAllocationCount) && (bufferImageGranularity == rhs.bufferImageGranularity) && (sparseAddressSpaceSize == rhs.sparseAddressSpaceSize) && (maxBoundDescriptorSets == rhs.maxBoundDescriptorSets) && (maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers) && (maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers) && 
(maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers) && (maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages) && (maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages) && (maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments) && (maxPerStageResources == rhs.maxPerStageResources) && (maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers) && (maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers) && (maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic) && (maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers) && (maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic) && (maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages) && (maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages) && (maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments) && (maxVertexInputAttributes == rhs.maxVertexInputAttributes) && (maxVertexInputBindings == rhs.maxVertexInputBindings) && (maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset) && (maxVertexInputBindingStride == rhs.maxVertexInputBindingStride) && (maxVertexOutputComponents == rhs.maxVertexOutputComponents) && (maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel) && (maxTessellationPatchSize == rhs.maxTessellationPatchSize) && (maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents) && (maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents) && (maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents) && (maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents) && (maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents) && (maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents) && (maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations) && (maxGeometryInputComponents == rhs.maxGeometryInputComponents) && (maxGeometryOutputComponents == rhs.maxGeometryOutputComponents) && (maxGeometryOutputVertices == rhs.maxGeometryOutputVertices) && (maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents) && (maxFragmentInputComponents == rhs.maxFragmentInputComponents) && (maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments) && (maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments) && (maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources) && (maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize) && (maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount) && (maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations) && (maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize) && (subPixelPrecisionBits == rhs.subPixelPrecisionBits) && (subTexelPrecisionBits == rhs.subTexelPrecisionBits) && (mipmapPrecisionBits == rhs.mipmapPrecisionBits) && (maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue) && (maxDrawIndirectCount == rhs.maxDrawIndirectCount) && (maxSamplerLodBias == rhs.maxSamplerLodBias) && (maxSamplerAnisotropy == rhs.maxSamplerAnisotropy) && (maxViewports == rhs.maxViewports) && (maxViewportDimensions == rhs.maxViewportDimensions) && (viewportBoundsRange == rhs.viewportBoundsRange) && (viewportSubPixelBits == rhs.viewportSubPixelBits) && 
(minMemoryMapAlignment == rhs.minMemoryMapAlignment) && (minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment) && (minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment) && (minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment) && (minTexelOffset == rhs.minTexelOffset) && (maxTexelOffset == rhs.maxTexelOffset) && (minTexelGatherOffset == rhs.minTexelGatherOffset) && (maxTexelGatherOffset == rhs.maxTexelGatherOffset) && (minInterpolationOffset == rhs.minInterpolationOffset) && (maxInterpolationOffset == rhs.maxInterpolationOffset) && (subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits) && (maxFramebufferWidth == rhs.maxFramebufferWidth) && (maxFramebufferHeight == rhs.maxFramebufferHeight) && (maxFramebufferLayers == rhs.maxFramebufferLayers) && (framebufferColorSampleCounts == rhs.framebufferColorSampleCounts) && (framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts) && (framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts) && (framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts) && (maxColorAttachments == rhs.maxColorAttachments) && (sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts) && (sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts) && (sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts) && (sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts) && (storageImageSampleCounts == rhs.storageImageSampleCounts) && (maxSampleMaskWords == rhs.maxSampleMaskWords) && (timestampComputeAndGraphics == rhs.timestampComputeAndGraphics) && (timestampPeriod == rhs.timestampPeriod) && (maxClipDistances == rhs.maxClipDistances) && (maxCullDistances == rhs.maxCullDistances) && (maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances) && (discreteQueuePriorities == rhs.discreteQueuePriorities) && (pointSizeRange == rhs.pointSizeRange) && (lineWidthRange == rhs.lineWidthRange) && (pointSizeGranularity == rhs.pointSizeGranularity) && (lineWidthGranularity == rhs.lineWidthGranularity) && (strictLines == rhs.strictLines) && (standardSampleLocations == rhs.standardSampleLocations) && (optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment) && (optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment) && (nonCoherentAtomSize == rhs.nonCoherentAtomSize); # endif } bool operator!=(PhysicalDeviceLimits const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t maxImageDimension1D = {}; uint32_t maxImageDimension2D = {}; uint32_t maxImageDimension3D = {}; uint32_t maxImageDimensionCube = {}; uint32_t maxImageArrayLayers = {}; uint32_t maxTexelBufferElements = {}; uint32_t maxUniformBufferRange = {}; uint32_t maxStorageBufferRange = {}; uint32_t maxPushConstantsSize = {}; uint32_t maxMemoryAllocationCount = {}; uint32_t maxSamplerAllocationCount = {}; VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; uint32_t maxBoundDescriptorSets = {}; uint32_t maxPerStageDescriptorSamplers = {}; uint32_t maxPerStageDescriptorUniformBuffers = {}; uint32_t maxPerStageDescriptorStorageBuffers = {}; uint32_t maxPerStageDescriptorSampledImages = {}; uint32_t maxPerStageDescriptorStorageImages = {}; uint32_t maxPerStageDescriptorInputAttachments = {}; uint32_t maxPerStageResources = {}; uint32_t maxDescriptorSetSamplers = {}; uint32_t maxDescriptorSetUniformBuffers = {}; uint32_t 
maxDescriptorSetUniformBuffersDynamic = {}; uint32_t maxDescriptorSetStorageBuffers = {}; uint32_t maxDescriptorSetStorageBuffersDynamic = {}; uint32_t maxDescriptorSetSampledImages = {}; uint32_t maxDescriptorSetStorageImages = {}; uint32_t maxDescriptorSetInputAttachments = {}; uint32_t maxVertexInputAttributes = {}; uint32_t maxVertexInputBindings = {}; uint32_t maxVertexInputAttributeOffset = {}; uint32_t maxVertexInputBindingStride = {}; uint32_t maxVertexOutputComponents = {}; uint32_t maxTessellationGenerationLevel = {}; uint32_t maxTessellationPatchSize = {}; uint32_t maxTessellationControlPerVertexInputComponents = {}; uint32_t maxTessellationControlPerVertexOutputComponents = {}; uint32_t maxTessellationControlPerPatchOutputComponents = {}; uint32_t maxTessellationControlTotalOutputComponents = {}; uint32_t maxTessellationEvaluationInputComponents = {}; uint32_t maxTessellationEvaluationOutputComponents = {}; uint32_t maxGeometryShaderInvocations = {}; uint32_t maxGeometryInputComponents = {}; uint32_t maxGeometryOutputComponents = {}; uint32_t maxGeometryOutputVertices = {}; uint32_t maxGeometryTotalOutputComponents = {}; uint32_t maxFragmentInputComponents = {}; uint32_t maxFragmentOutputAttachments = {}; uint32_t maxFragmentDualSrcAttachments = {}; uint32_t maxFragmentCombinedOutputResources = {}; uint32_t maxComputeSharedMemorySize = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; uint32_t maxComputeWorkGroupInvocations = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; uint32_t subPixelPrecisionBits = {}; uint32_t subTexelPrecisionBits = {}; uint32_t mipmapPrecisionBits = {}; uint32_t maxDrawIndexedIndexValue = {}; uint32_t maxDrawIndirectCount = {}; float maxSamplerLodBias = {}; float maxSamplerAnisotropy = {}; uint32_t maxViewports = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; uint32_t viewportSubPixelBits = {}; size_t minMemoryMapAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; int32_t minTexelOffset = {}; uint32_t maxTexelOffset = {}; int32_t minTexelGatherOffset = {}; uint32_t maxTexelGatherOffset = {}; float minInterpolationOffset = {}; float maxInterpolationOffset = {}; uint32_t subPixelInterpolationOffsetBits = {}; uint32_t maxFramebufferWidth = {}; uint32_t maxFramebufferHeight = {}; uint32_t maxFramebufferLayers = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; uint32_t maxColorAttachments = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; uint32_t maxSampleMaskWords = {}; VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; float timestampPeriod = {}; uint32_t maxClipDistances = {}; uint32_t maxCullDistances = {}; uint32_t maxCombinedClipAndCullDistances = {}; uint32_t 
discreteQueuePriorities = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; float pointSizeGranularity = {}; float lineWidthGranularity = {}; VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits) == sizeof(VkPhysicalDeviceLimits), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceLimits is not nothrow_move_constructible!"); struct PhysicalDeviceLineRasterizationFeaturesEXT { using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), rectangularLines(rectangularLines_), bresenhamLines(bresenhamLines_), smoothLines(smoothLines_), stippledRectangularLines(stippledRectangularLines_), stippledBresenhamLines(stippledBresenhamLines_), stippledSmoothLines(stippledSmoothLines_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(PhysicalDeviceLineRasterizationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLineRasterizationFeaturesEXT(VkPhysicalDeviceLineRasterizationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLineRasterizationFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceLineRasterizationFeaturesEXT &operator=(PhysicalDeviceLineRasterizationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLineRasterizationFeaturesEXT &operator=(VkPhysicalDeviceLineRasterizationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_) VULKAN_HPP_NOEXCEPT { rectangularLines = rectangularLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &setBresenhamLines(VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_) VULKAN_HPP_NOEXCEPT { bresenhamLines = bresenhamLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &setSmoothLines(VULKAN_HPP_NAMESPACE::Bool32 smoothLines_) VULKAN_HPP_NOEXCEPT { smoothLines = smoothLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & 
setStippledRectangularLines(VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_) VULKAN_HPP_NOEXCEPT { stippledRectangularLines = stippledRectangularLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines(VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_) VULKAN_HPP_NOEXCEPT { stippledBresenhamLines = stippledBresenhamLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines(VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_) VULKAN_HPP_NOEXCEPT { stippledSmoothLines = stippledSmoothLines_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceLineRasterizationFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceLineRasterizationFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (rectangularLines == rhs.rectangularLines) && (bresenhamLines == rhs.bresenhamLines) && (smoothLines == rhs.smoothLines) && (stippledRectangularLines == rhs.stippledRectangularLines) && (stippledBresenhamLines == rhs.stippledBresenhamLines) && (stippledSmoothLines == rhs.stippledSmoothLines); # endif } bool operator!=(PhysicalDeviceLineRasterizationFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT) == sizeof(VkPhysicalDeviceLineRasterizationFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceLineRasterizationFeaturesEXT; }; struct PhysicalDeviceLineRasterizationPropertiesEXT { using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
      lineSubPixelPrecisionBits(lineSubPixelPrecisionBits_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(PhysicalDeviceLineRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLineRasterizationPropertiesEXT(VkPhysicalDeviceLineRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLineRasterizationPropertiesEXT(*reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceLineRasterizationPropertiesEXT &operator=(PhysicalDeviceLineRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLineRasterizationPropertiesEXT &operator=(VkPhysicalDeviceLineRasterizationPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs);
      return *this;
    }

    explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT *>(this);
    }

    explicit operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, lineSubPixelPrecisionBits);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceLineRasterizationPropertiesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceLineRasterizationPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits);
# endif
    }

    bool operator!=(PhysicalDeviceLineRasterizationPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
    void *                              pNext                     = {};
    uint32_t                            lineSubPixelPrecisionBits = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT) == sizeof(VkPhysicalDeviceLineRasterizationPropertiesEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
                           "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
  {
    using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
  };

  struct PhysicalDeviceLinearColorAttachmentFeaturesNV
  {
    using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , linearColorAttachment(linearColorAttachment_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV(PhysicalDeviceLinearColorAttachmentFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLinearColorAttachmentFeaturesNV(VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLinearColorAttachmentFeaturesNV(*reinterpret_cast<PhysicalDeviceLinearColorAttachmentFeaturesNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
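    // Usage sketch (illustrative comment, not part of the generated API): like other feature structs,
    // this one is queried by chaining it into PhysicalDeviceFeatures2 via pNext and, if supported,
    // passed on through DeviceCreateInfo::pNext to enable the feature at device creation. Assuming a
    // valid VULKAN_HPP_NAMESPACE::PhysicalDevice named `physicalDevice` that advertises
    // VK_NV_linear_color_attachment, and the default dispatcher:
    //
    //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
    //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>();
    //   VULKAN_HPP_NAMESPACE::Bool32 supported =
    //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>().linearColorAttachment;
    //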
    PhysicalDeviceLinearColorAttachmentFeaturesNV &operator=(PhysicalDeviceLinearColorAttachmentFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLinearColorAttachmentFeaturesNV &operator=(VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV &setLinearColorAttachment(VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_) VULKAN_HPP_NOEXCEPT
    {
      linearColorAttachment = linearColorAttachment_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>(this);
    }

    explicit operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLinearColorAttachmentFeaturesNV *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, linearColorAttachment);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceLinearColorAttachmentFeaturesNV const &) const = default;
#else
    bool operator==(PhysicalDeviceLinearColorAttachmentFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (linearColorAttachment == rhs.linearColorAttachment);
# endif
    }

    bool operator!=(PhysicalDeviceLinearColorAttachmentFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                 = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV;
    void *                              pNext                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        linearColorAttachment = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV) == sizeof(VkPhysicalDeviceLinearColorAttachmentFeaturesNV),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
                           "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV>
  {
    using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV;
  };

  struct PhysicalDeviceMaintenance3Properties
  {
    using NativeType = VkPhysicalDeviceMaintenance3Properties;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t                         maxPerSetDescriptors_    = {},
                                                              VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
                                                              void *                           pNext_                   = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , maxPerSetDescriptors(maxPerSetDescriptors_)
      , maxMemoryAllocationSize(maxMemoryAllocationSize_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(PhysicalDeviceMaintenance3Properties const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance3Properties(VkPhysicalDeviceMaintenance3Properties const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance3Properties(*reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
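    // Usage sketch (illustrative comment, not part of the generated API): property structs with a
    // pNext member, such as this one, are filled by the implementation when chained into
    // PhysicalDeviceProperties2. Assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named
    // `physicalDevice` on a Vulkan 1.1 (or VK_KHR_maintenance3) capable implementation, with the
    // default dispatcher:
    //
    //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
    //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>();
    //   VULKAN_HPP_NAMESPACE::DeviceSize maxAllocationSize =
    //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>().maxMemoryAllocationSize;
    //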
    PhysicalDeviceMaintenance3Properties &operator=(PhysicalDeviceMaintenance3Properties const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance3Properties &operator=(VkPhysicalDeviceMaintenance3Properties const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>(&rhs);
      return *this;
    }

    explicit operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>(this);
    }

    explicit operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceMaintenance3Properties const &) const = default;
#else
    bool operator==(PhysicalDeviceMaintenance3Properties const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxPerSetDescriptors == rhs.maxPerSetDescriptors) &&
             (maxMemoryAllocationSize == rhs.maxMemoryAllocationSize);
# endif
    }

    bool operator!=(PhysicalDeviceMaintenance3Properties const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::ePhysicalDeviceMaintenance3Properties;
    void *                              pNext                   = {};
    uint32_t                            maxPerSetDescriptors    = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    maxMemoryAllocationSize = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties) == sizeof(VkPhysicalDeviceMaintenance3Properties),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
                           "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
  {
    using Type = PhysicalDeviceMaintenance3Properties;
  };

  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;

  struct PhysicalDeviceMaintenance4Features
  {
    using NativeType = VkPhysicalDeviceMaintenance4Features;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features(VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , maintenance4(maintenance4_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features(PhysicalDeviceMaintenance4Features const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance4Features(VkPhysicalDeviceMaintenance4Features const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance4Features(*reinterpret_cast<PhysicalDeviceMaintenance4Features const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceMaintenance4Features &operator=(PhysicalDeviceMaintenance4Features const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance4Features &operator=(VkPhysicalDeviceMaintenance4Features const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext =
pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features &setMaintenance4(VULKAN_HPP_NAMESPACE::Bool32 maintenance4_) VULKAN_HPP_NOEXCEPT { maintenance4 = maintenance4_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maintenance4); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMaintenance4Features const &) const = default; #else bool operator==(PhysicalDeviceMaintenance4Features const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maintenance4 == rhs.maintenance4); # endif } bool operator!=(PhysicalDeviceMaintenance4Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features) == sizeof(VkPhysicalDeviceMaintenance4Features), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMaintenance4Features; }; using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; struct PhysicalDeviceMaintenance4Properties { using NativeType = VkPhysicalDeviceMaintenance4Properties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties(VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxBufferSize(maxBufferSize_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties(PhysicalDeviceMaintenance4Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance4Properties(VkPhysicalDeviceMaintenance4Properties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance4Properties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMaintenance4Properties &operator=(PhysicalDeviceMaintenance4Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance4Properties &operator=(VkPhysicalDeviceMaintenance4Properties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie(sType, pNext, maxBufferSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMaintenance4Properties const &) const = default; #else bool operator==(PhysicalDeviceMaintenance4Properties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxBufferSize == rhs.maxBufferSize); # endif } bool operator!=(PhysicalDeviceMaintenance4Properties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties) == sizeof(VkPhysicalDeviceMaintenance4Properties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMaintenance4Properties; }; using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; struct PhysicalDeviceMemoryBudgetPropertiesEXT { using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array const &heapBudget_ = {}, std::array const &heapUsage_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), heapBudget(heapBudget_), heapUsage(heapUsage_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(PhysicalDeviceMemoryBudgetPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryBudgetPropertiesEXT(VkPhysicalDeviceMemoryBudgetPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryBudgetPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMemoryBudgetPropertiesEXT &operator=(PhysicalDeviceMemoryBudgetPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryBudgetPropertiesEXT &operator=(VkPhysicalDeviceMemoryBudgetPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, heapBudget, heapUsage); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMemoryBudgetPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceMemoryBudgetPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (heapBudget == rhs.heapBudget) && (heapUsage 
== rhs.heapUsage); # endif } bool operator!=(PhysicalDeviceMemoryBudgetPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT) == sizeof(VkPhysicalDeviceMemoryBudgetPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; }; struct PhysicalDeviceMemoryPriorityFeaturesEXT { using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryPriority(memoryPriority_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(PhysicalDeviceMemoryPriorityFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryPriorityFeaturesEXT(VkPhysicalDeviceMemoryPriorityFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryPriorityFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMemoryPriorityFeaturesEXT &operator=(PhysicalDeviceMemoryPriorityFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryPriorityFeaturesEXT &operator=(VkPhysicalDeviceMemoryPriorityFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT &setMemoryPriority(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_) VULKAN_HPP_NOEXCEPT { memoryPriority = memoryPriority_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryPriority); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMemoryPriorityFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceMemoryPriorityFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryPriority == rhs.memoryPriority); # endif } bool operator!=(PhysicalDeviceMemoryPriorityFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT) == sizeof(VkPhysicalDeviceMemoryPriorityFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; }; struct PhysicalDeviceMemoryProperties { using NativeType = VkPhysicalDeviceMemoryProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array const &memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array const &memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT : memoryTypeCount(memoryTypeCount_), memoryTypes(memoryTypes_), memoryHeapCount(memoryHeapCount_), memoryHeaps(memoryHeaps_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(PhysicalDeviceMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryProperties(VkPhysicalDeviceMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMemoryProperties &operator=(PhysicalDeviceMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryProperties &operator=(VkPhysicalDeviceMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMemoryProperties const &) const = default; #else bool operator==(PhysicalDeviceMemoryProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (memoryTypeCount == rhs.memoryTypeCount) && (memoryTypes == rhs.memoryTypes) && (memoryHeapCount == rhs.memoryHeapCount) && (memoryHeaps == rhs.memoryHeaps); # endif } bool operator!=(PhysicalDeviceMemoryProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t memoryTypeCount = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; uint32_t memoryHeapCount = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties) == sizeof(VkPhysicalDeviceMemoryProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!"); struct 
PhysicalDeviceMemoryProperties2 { using NativeType = VkPhysicalDeviceMemoryProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryProperties(memoryProperties_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(PhysicalDeviceMemoryProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryProperties2(VkPhysicalDeviceMemoryProperties2 const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryProperties2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMemoryProperties2 &operator=(PhysicalDeviceMemoryProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryProperties2 &operator=(VkPhysicalDeviceMemoryProperties2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMemoryProperties2 const &) const = default; #else bool operator==(PhysicalDeviceMemoryProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryProperties == rhs.memoryProperties); # endif } bool operator!=(PhysicalDeviceMemoryProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; void *pNext = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2) == sizeof(VkPhysicalDeviceMemoryProperties2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMemoryProperties2; }; using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; struct PhysicalDeviceMeshShaderFeaturesNV { using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), taskShader(taskShader_), meshShader(meshShader_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(PhysicalDeviceMeshShaderFeaturesNV const &rhs) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderFeaturesNV(VkPhysicalDeviceMeshShaderFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMeshShaderFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMeshShaderFeaturesNV &operator=(PhysicalDeviceMeshShaderFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderFeaturesNV &operator=(VkPhysicalDeviceMeshShaderFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &setTaskShader(VULKAN_HPP_NAMESPACE::Bool32 taskShader_) VULKAN_HPP_NOEXCEPT { taskShader = taskShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &setMeshShader(VULKAN_HPP_NAMESPACE::Bool32 meshShader_) VULKAN_HPP_NOEXCEPT { meshShader = meshShader_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, taskShader, meshShader); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMeshShaderFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceMeshShaderFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (taskShader == rhs.taskShader) && (meshShader == rhs.meshShader); # endif } bool operator!=(PhysicalDeviceMeshShaderFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV) == sizeof(VkPhysicalDeviceMeshShaderFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMeshShaderFeaturesNV; }; struct PhysicalDeviceMeshShaderPropertiesNV { using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array const &maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array const &maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = 
{}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxDrawMeshTasksCount(maxDrawMeshTasksCount_), maxTaskWorkGroupInvocations(maxTaskWorkGroupInvocations_), maxTaskWorkGroupSize(maxTaskWorkGroupSize_), maxTaskTotalMemorySize(maxTaskTotalMemorySize_), maxTaskOutputCount(maxTaskOutputCount_), maxMeshWorkGroupInvocations(maxMeshWorkGroupInvocations_), maxMeshWorkGroupSize(maxMeshWorkGroupSize_), maxMeshTotalMemorySize(maxMeshTotalMemorySize_), maxMeshOutputVertices(maxMeshOutputVertices_), maxMeshOutputPrimitives(maxMeshOutputPrimitives_), maxMeshMultiviewViewCount(maxMeshMultiviewViewCount_), meshOutputPerVertexGranularity(meshOutputPerVertexGranularity_), meshOutputPerPrimitiveGranularity(meshOutputPerPrimitiveGranularity_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(PhysicalDeviceMeshShaderPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderPropertiesNV(VkPhysicalDeviceMeshShaderPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMeshShaderPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMeshShaderPropertiesNV &operator=(PhysicalDeviceMeshShaderPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderPropertiesNV &operator=(VkPhysicalDeviceMeshShaderPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxDrawMeshTasksCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskTotalMemorySize, maxTaskOutputCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshTotalMemorySize, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMeshShaderPropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceMeshShaderPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount) && (maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations) && (maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize) && (maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize) && (maxTaskOutputCount == rhs.maxTaskOutputCount) && (maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations) && (maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize) && (maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize) && (maxMeshOutputVertices == rhs.maxMeshOutputVertices) && (maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives) && 
(maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount) && (meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity) && (meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity); # endif } bool operator!=(PhysicalDeviceMeshShaderPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; void *pNext = {}; uint32_t maxDrawMeshTasksCount = {}; uint32_t maxTaskWorkGroupInvocations = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; uint32_t maxTaskTotalMemorySize = {}; uint32_t maxTaskOutputCount = {}; uint32_t maxMeshWorkGroupInvocations = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; uint32_t maxMeshTotalMemorySize = {}; uint32_t maxMeshOutputVertices = {}; uint32_t maxMeshOutputPrimitives = {}; uint32_t maxMeshMultiviewViewCount = {}; uint32_t meshOutputPerVertexGranularity = {}; uint32_t meshOutputPerPrimitiveGranularity = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV) == sizeof(VkPhysicalDeviceMeshShaderPropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMeshShaderPropertiesNV; }; struct PhysicalDeviceMultiDrawFeaturesEXT { using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), multiDraw(multiDraw_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT(PhysicalDeviceMultiDrawFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiDrawFeaturesEXT(VkPhysicalDeviceMultiDrawFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiDrawFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMultiDrawFeaturesEXT &operator=(PhysicalDeviceMultiDrawFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiDrawFeaturesEXT &operator=(VkPhysicalDeviceMultiDrawFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT &setMultiDraw(VULKAN_HPP_NAMESPACE::Bool32 multiDraw_) VULKAN_HPP_NOEXCEPT { multiDraw = multiDraw_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, multiDraw); } #endif #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceMultiDrawFeaturesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceMultiDrawFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (multiDraw == rhs.multiDraw);
# endif
    }

    bool operator!=(PhysicalDeviceMultiDrawFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT) == sizeof(VkPhysicalDeviceMultiDrawFeaturesEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
                           "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
  {
    using Type = PhysicalDeviceMultiDrawFeaturesEXT;
  };

  struct PhysicalDeviceMultiDrawPropertiesEXT
  {
    using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT(uint32_t maxMultiDrawCount_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), maxMultiDrawCount(maxMultiDrawCount_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT(PhysicalDeviceMultiDrawPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiDrawPropertiesEXT(VkPhysicalDeviceMultiDrawPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiDrawPropertiesEXT(*reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceMultiDrawPropertiesEXT &operator=(PhysicalDeviceMultiDrawPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiDrawPropertiesEXT &operator=(VkPhysicalDeviceMultiDrawPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>(&rhs);
      return *this;
    }

    explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>(this);
    }

    explicit operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, maxMultiDrawCount);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceMultiDrawPropertiesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceMultiDrawPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxMultiDrawCount == rhs.maxMultiDrawCount);
# endif
    }

    bool operator!=(PhysicalDeviceMultiDrawPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
    void *pNext = {};
    uint32_t maxMultiDrawCount = {};
  };
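  // Usage sketch (illustrative comment only, not part of the generated API): feature and property structs such as
  // the two above are normally filled in by chaining them into PhysicalDeviceFeatures2 / PhysicalDeviceProperties2
  // through their pNext members. Assuming the default `vk` namespace, a valid vk::PhysicalDevice `physicalDevice`,
  // and the enhanced-mode StructureChain query overloads declared elsewhere in this header, a query might look like:
  //
  //   auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                                   vk::PhysicalDeviceMultiDrawFeaturesEXT>();
  //   bool multiDrawSupported = featureChain.get<vk::PhysicalDeviceMultiDrawFeaturesEXT>().multiDraw;
  //
  //   auto propertyChain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                                      vk::PhysicalDeviceMultiDrawPropertiesEXT>();
  //   uint32_t maxDraws = propertyChain.get<vk::PhysicalDeviceMultiDrawPropertiesEXT>().maxMultiDrawCount;
  //
  // Both queries assume the VK_EXT_multi_draw extension is supported by the physical device.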
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT) == sizeof(VkPhysicalDeviceMultiDrawPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMultiDrawPropertiesEXT; }; struct PhysicalDeviceMultiviewFeatures { using NativeType = VkPhysicalDeviceMultiviewFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), multiview(multiview_), multiviewGeometryShader(multiviewGeometryShader_), multiviewTessellationShader(multiviewTessellationShader_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(PhysicalDeviceMultiviewFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewFeatures(VkPhysicalDeviceMultiviewFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMultiviewFeatures &operator=(PhysicalDeviceMultiviewFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewFeatures &operator=(VkPhysicalDeviceMultiviewFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &setMultiview(VULKAN_HPP_NAMESPACE::Bool32 multiview_) VULKAN_HPP_NOEXCEPT { multiview = multiview_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader(VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_) VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader(VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_) VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMultiviewFeatures const &) const = default; #else bool operator==(PhysicalDeviceMultiviewFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(multiview == rhs.multiview) && (multiviewGeometryShader == rhs.multiviewGeometryShader) && (multiviewTessellationShader == rhs.multiviewTessellationShader); # endif } bool operator!=(PhysicalDeviceMultiviewFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures) == sizeof(VkPhysicalDeviceMultiviewFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMultiviewFeatures; }; using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), perViewPositionAllComponents(perViewPositionAllComponents_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=(PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &operator=(VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, perViewPositionAllComponents); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &) const = default; #else bool operator==(PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (perViewPositionAllComponents == rhs.perViewPositionAllComponents); # 
endif } bool operator!=(PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX) == sizeof(VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; }; struct PhysicalDeviceMultiviewProperties { using NativeType = VkPhysicalDeviceMultiviewProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxMultiviewViewCount(maxMultiviewViewCount_), maxMultiviewInstanceIndex(maxMultiviewInstanceIndex_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(PhysicalDeviceMultiviewProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewProperties(VkPhysicalDeviceMultiviewProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMultiviewProperties &operator=(PhysicalDeviceMultiviewProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewProperties &operator=(VkPhysicalDeviceMultiviewProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMultiviewProperties const &) const = default; #else bool operator==(PhysicalDeviceMultiviewProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxMultiviewViewCount == rhs.maxMultiviewViewCount) && (maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex); # endif } bool operator!=(PhysicalDeviceMultiviewProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; void *pNext = {}; uint32_t maxMultiviewViewCount = {}; uint32_t maxMultiviewInstanceIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties) 
== sizeof(VkPhysicalDeviceMultiviewProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMultiviewProperties; }; using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE { using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), mutableDescriptorType(mutableDescriptorType_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &operator=(PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &operator=(VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setMutableDescriptorType(VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_) VULKAN_HPP_NOEXCEPT { mutableDescriptorType = mutableDescriptorType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, mutableDescriptorType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &) const = default; #else bool operator==(PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (mutableDescriptorType == rhs.mutableDescriptorType); # endif } bool operator!=(PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 
mutableDescriptorType = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE) == sizeof(VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; }; struct PhysicalDevicePCIBusInfoPropertiesEXT { using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pciDomain(pciDomain_), pciBus(pciBus_), pciDevice(pciDevice_), pciFunction(pciFunction_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(PhysicalDevicePCIBusInfoPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePCIBusInfoPropertiesEXT(VkPhysicalDevicePCIBusInfoPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePCIBusInfoPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePCIBusInfoPropertiesEXT &operator=(PhysicalDevicePCIBusInfoPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePCIBusInfoPropertiesEXT &operator=(VkPhysicalDevicePCIBusInfoPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pciDomain, pciBus, pciDevice, pciFunction); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePCIBusInfoPropertiesEXT const &) const = default; #else bool operator==(PhysicalDevicePCIBusInfoPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pciDomain == rhs.pciDomain) && (pciBus == rhs.pciBus) && (pciDevice == rhs.pciDevice) && (pciFunction == rhs.pciFunction); # endif } bool operator!=(PhysicalDevicePCIBusInfoPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; void *pNext = {}; uint32_t pciDomain = {}; uint32_t pciBus = {}; uint32_t pciDevice = {}; uint32_t pciFunction = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT) == sizeof(VkPhysicalDevicePCIBusInfoPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePCIBusInfoPropertiesEXT; }; struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pageableDeviceLocalMemory(pageableDeviceLocalMemory_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=(PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &operator=(VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPageableDeviceLocalMemory(VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_) VULKAN_HPP_NOEXCEPT { pageableDeviceLocalMemory = pageableDeviceLocalMemory_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pageableDeviceLocalMemory); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &) const = default; #else bool operator==(PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory); # endif } bool operator!=(PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT) == 
sizeof(VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; }; struct PhysicalDevicePerformanceQueryFeaturesKHR { using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), performanceCounterQueryPools(performanceCounterQueryPools_), performanceCounterMultipleQueryPools(performanceCounterMultipleQueryPools_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(PhysicalDevicePerformanceQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryFeaturesKHR(VkPhysicalDevicePerformanceQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerformanceQueryFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePerformanceQueryFeaturesKHR &operator=(PhysicalDevicePerformanceQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryFeaturesKHR &operator=(VkPhysicalDevicePerformanceQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_) VULKAN_HPP_NOEXCEPT { performanceCounterQueryPools = performanceCounterQueryPools_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_) VULKAN_HPP_NOEXCEPT { performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePerformanceQueryFeaturesKHR const &) const = default; #else bool operator==(PhysicalDevicePerformanceQueryFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) 
&& (performanceCounterQueryPools == rhs.performanceCounterQueryPools) && (performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools); # endif } bool operator!=(PhysicalDevicePerformanceQueryFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR) == sizeof(VkPhysicalDevicePerformanceQueryFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePerformanceQueryFeaturesKHR; }; struct PhysicalDevicePerformanceQueryPropertiesKHR { using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), allowCommandBufferQueryCopies(allowCommandBufferQueryCopies_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(PhysicalDevicePerformanceQueryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryPropertiesKHR(VkPhysicalDevicePerformanceQueryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerformanceQueryPropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePerformanceQueryPropertiesKHR &operator=(PhysicalDevicePerformanceQueryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryPropertiesKHR &operator=(VkPhysicalDevicePerformanceQueryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, allowCommandBufferQueryCopies); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePerformanceQueryPropertiesKHR const &) const = default; #else bool operator==(PhysicalDevicePerformanceQueryPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies); # endif } bool operator!=(PhysicalDevicePerformanceQueryPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR) == sizeof(VkPhysicalDevicePerformanceQueryPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePerformanceQueryPropertiesKHR; }; struct PhysicalDevicePipelineCreationCacheControlFeatures { using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pipelineCreationCacheControl(pipelineCreationCacheControl_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures(PhysicalDevicePipelineCreationCacheControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineCreationCacheControlFeatures(VkPhysicalDevicePipelineCreationCacheControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineCreationCacheControlFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePipelineCreationCacheControlFeatures & operator=(PhysicalDevicePipelineCreationCacheControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineCreationCacheControlFeatures &operator=(VkPhysicalDevicePipelineCreationCacheControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPipelineCreationCacheControl(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_) VULKAN_HPP_NOEXCEPT { pipelineCreationCacheControl = pipelineCreationCacheControl_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pipelineCreationCacheControl); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePipelineCreationCacheControlFeatures const &) const = default; #else bool operator==(PhysicalDevicePipelineCreationCacheControlFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipelineCreationCacheControl == rhs.pipelineCreationCacheControl); # endif } 
bool operator!=(PhysicalDevicePipelineCreationCacheControlFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures) == sizeof(VkPhysicalDevicePipelineCreationCacheControlFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePipelineCreationCacheControlFeatures; }; using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR { using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pipelineExecutableInfo(pipelineExecutableInfo_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=(PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &operator=(VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_) VULKAN_HPP_NOEXCEPT { pipelineExecutableInfo = pipelineExecutableInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pipelineExecutableInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &) 
const = default; #else bool operator==(PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipelineExecutableInfo == rhs.pipelineExecutableInfo); # endif } bool operator!=(PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR) == sizeof(VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; }; struct PhysicalDevicePointClippingProperties { using NativeType = VkPhysicalDevicePointClippingProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pointClippingBehavior(pointClippingBehavior_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(PhysicalDevicePointClippingProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePointClippingProperties(VkPhysicalDevicePointClippingProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePointClippingProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePointClippingProperties &operator=(PhysicalDevicePointClippingProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePointClippingProperties &operator=(VkPhysicalDevicePointClippingProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pointClippingBehavior); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePointClippingProperties const &) const = default; #else bool operator==(PhysicalDevicePointClippingProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pointClippingBehavior == rhs.pointClippingBehavior); # endif } bool operator!=(PhysicalDevicePointClippingProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); 
} #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties) == sizeof(VkPhysicalDevicePointClippingProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePointClippingProperties; }; using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; #if defined(VK_ENABLE_BETA_EXTENSIONS) struct PhysicalDevicePortabilitySubsetFeaturesKHR { using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), constantAlphaColorBlendFactors(constantAlphaColorBlendFactors_), events(events_), imageViewFormatReinterpretation(imageViewFormatReinterpretation_), imageViewFormatSwizzle(imageViewFormatSwizzle_), imageView2DOn3DImage(imageView2DOn3DImage_), multisampleArrayImage(multisampleArrayImage_), mutableComparisonSamplers(mutableComparisonSamplers_), pointPolygons(pointPolygons_), samplerMipLodBias(samplerMipLodBias_), separateStencilMaskRef(separateStencilMaskRef_), shaderSampleRateInterpolationFunctions(shaderSampleRateInterpolationFunctions_), tessellationIsolines(tessellationIsolines_), tessellationPointMode(tessellationPointMode_), triangleFans(triangleFans_), vertexAttributeAccessBeyondStride(vertexAttributeAccessBeyondStride_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(PhysicalDevicePortabilitySubsetFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePortabilitySubsetFeaturesKHR(VkPhysicalDevicePortabilitySubsetFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePortabilitySubsetFeaturesKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePortabilitySubsetFeaturesKHR &operator=(PhysicalDevicePortabilitySubsetFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; 
PhysicalDevicePortabilitySubsetFeaturesKHR &operator=(VkPhysicalDevicePortabilitySubsetFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_) VULKAN_HPP_NOEXCEPT { constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &setEvents(VULKAN_HPP_NAMESPACE::Bool32 events_) VULKAN_HPP_NOEXCEPT { events = events_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation(VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_) VULKAN_HPP_NOEXCEPT { imageViewFormatReinterpretation = imageViewFormatReinterpretation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle(VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_) VULKAN_HPP_NOEXCEPT { imageViewFormatSwizzle = imageViewFormatSwizzle_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage(VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_) VULKAN_HPP_NOEXCEPT { imageView2DOn3DImage = imageView2DOn3DImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage(VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_) VULKAN_HPP_NOEXCEPT { multisampleArrayImage = multisampleArrayImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers(VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_) VULKAN_HPP_NOEXCEPT { mutableComparisonSamplers = mutableComparisonSamplers_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &setPointPolygons(VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_) VULKAN_HPP_NOEXCEPT { pointPolygons = pointPolygons_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias(VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_) VULKAN_HPP_NOEXCEPT { samplerMipLodBias = samplerMipLodBias_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef(VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_) VULKAN_HPP_NOEXCEPT { separateStencilMaskRef = separateStencilMaskRef_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions(VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_) VULKAN_HPP_NOEXCEPT { shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines(VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_) VULKAN_HPP_NOEXCEPT { tessellationIsolines = tessellationIsolines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode(VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_) VULKAN_HPP_NOEXCEPT { tessellationPointMode = tessellationPointMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR 
&setTriangleFans(VULKAN_HPP_NAMESPACE::Bool32 triangleFans_) VULKAN_HPP_NOEXCEPT { triangleFans = triangleFans_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_) VULKAN_HPP_NOEXCEPT { vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, constantAlphaColorBlendFactors, events, imageViewFormatReinterpretation, imageViewFormatSwizzle, imageView2DOn3DImage, multisampleArrayImage, mutableComparisonSamplers, pointPolygons, samplerMipLodBias, separateStencilMaskRef, shaderSampleRateInterpolationFunctions, tessellationIsolines, tessellationPointMode, triangleFans, vertexAttributeAccessBeyondStride); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePortabilitySubsetFeaturesKHR const &) const = default; # else bool operator==(PhysicalDevicePortabilitySubsetFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors) && (events == rhs.events) && (imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation) && (imageViewFormatSwizzle == rhs.imageViewFormatSwizzle) && (imageView2DOn3DImage == rhs.imageView2DOn3DImage) && (multisampleArrayImage == rhs.multisampleArrayImage) && (mutableComparisonSamplers == rhs.mutableComparisonSamplers) && (pointPolygons == rhs.pointPolygons) && (samplerMipLodBias == rhs.samplerMipLodBias) && (separateStencilMaskRef == rhs.separateStencilMaskRef) && (shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions) && (tessellationIsolines == rhs.tessellationIsolines) && (tessellationPointMode == rhs.tessellationPointMode) && (triangleFans == rhs.triangleFans) && (vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride); # endif } bool operator!=(PhysicalDevicePortabilitySubsetFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; VULKAN_HPP_NAMESPACE::Bool32 events = {}; VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; 
VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR) == sizeof(VkPhysicalDevicePortabilitySubsetFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct PhysicalDevicePortabilitySubsetPropertiesKHR { using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minVertexInputBindingStrideAlignment(minVertexInputBindingStrideAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(PhysicalDevicePortabilitySubsetPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePortabilitySubsetPropertiesKHR(VkPhysicalDevicePortabilitySubsetPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePortabilitySubsetPropertiesKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePortabilitySubsetPropertiesKHR &operator=(PhysicalDevicePortabilitySubsetPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePortabilitySubsetPropertiesKHR &operator=(VkPhysicalDevicePortabilitySubsetPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment(uint32_t minVertexInputBindingStrideAlignment_) VULKAN_HPP_NOEXCEPT { minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minVertexInputBindingStrideAlignment); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePortabilitySubsetPropertiesKHR const &) const = default; # else bool operator==(PhysicalDevicePortabilitySubsetPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment); # endif } bool 
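  // Illustrative usage sketch, not part of the generated API: on a portability
  // implementation (e.g. Vulkan layered over Metal), PhysicalDevicePortabilitySubsetFeaturesKHR
  // above reports which otherwise-core behaviours are actually available. The
  // `physicalDevice` handle is assumed, and VK_KHR_portability_subset is assumed to be
  // supported by it.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                    features2;
  //   features2.pNext = &portabilityFeatures;
  //   physicalDevice.getFeatures2( &features2 );
  //   if ( !portabilityFeatures.triangleFans )
  //   {
  //     // fall back to indexed triangle lists instead of eTriangleFan topology
  //   }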
operator!=(PhysicalDevicePortabilitySubsetPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; void *pNext = {}; uint32_t minVertexInputBindingStrideAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR) == sizeof(VkPhysicalDevicePortabilitySubsetPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDevicePresentIdFeaturesKHR { using NativeType = VkPhysicalDevicePresentIdFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), presentId(presentId_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR(PhysicalDevicePresentIdFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentIdFeaturesKHR(VkPhysicalDevicePresentIdFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentIdFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePresentIdFeaturesKHR &operator=(PhysicalDevicePresentIdFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentIdFeaturesKHR &operator=(VkPhysicalDevicePresentIdFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR &setPresentId(VULKAN_HPP_NAMESPACE::Bool32 presentId_) VULKAN_HPP_NOEXCEPT { presentId = presentId_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, presentId); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePresentIdFeaturesKHR const &) const = default; #else bool operator==(PhysicalDevicePresentIdFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (presentId == rhs.presentId); # endif } bool operator!=(PhysicalDevicePresentIdFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 
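  // Illustrative usage sketch, not part of the generated API: the single limit in
  // PhysicalDevicePortabilitySubsetPropertiesKHR above constrains vertex input binding
  // strides. A stride can be rounded up to that alignment before building the vertex
  // input state; `portabilityProperties` (queried via a getProperties2 chain) and
  // `rawStride` are assumed inputs.
  //
  //   uint32_t alignment = portabilityProperties.minVertexInputBindingStrideAlignment;
  //   uint32_t stride    = ( rawStride + alignment - 1 ) / alignment * alignment;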
presentId = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR) == sizeof(VkPhysicalDevicePresentIdFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePresentIdFeaturesKHR; }; struct PhysicalDevicePresentWaitFeaturesKHR { using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), presentWait(presentWait_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR(PhysicalDevicePresentWaitFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentWaitFeaturesKHR(VkPhysicalDevicePresentWaitFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentWaitFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePresentWaitFeaturesKHR &operator=(PhysicalDevicePresentWaitFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentWaitFeaturesKHR &operator=(VkPhysicalDevicePresentWaitFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR &setPresentWait(VULKAN_HPP_NAMESPACE::Bool32 presentWait_) VULKAN_HPP_NOEXCEPT { presentWait = presentWait_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, presentWait); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePresentWaitFeaturesKHR const &) const = default; #else bool operator==(PhysicalDevicePresentWaitFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (presentWait == rhs.presentWait); # endif } bool operator!=(PhysicalDevicePresentWaitFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 presentWait = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR) == sizeof(VkPhysicalDevicePresentWaitFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
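  // Illustrative usage sketch, not part of the generated API: feature structures such as
  // PhysicalDevicePresentIdFeaturesKHR above are also used to enable a feature by
  // chaining them into DeviceCreateInfo::pNext; the generated setters return *this and
  // can be chained. `queueCreateInfo` and `physicalDevice` are assumed to exist.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR presentIdFeatures( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo                   deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &presentIdFeatures );
  //   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );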
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePresentWaitFeaturesKHR; }; struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), primitiveTopologyListRestart(primitiveTopologyListRestart_), primitiveTopologyPatchListRestart(primitiveTopologyPatchListRestart_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=(PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &operator=(VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyListRestart(VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_) VULKAN_HPP_NOEXCEPT { primitiveTopologyListRestart = primitiveTopologyListRestart_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyPatchListRestart(VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_) VULKAN_HPP_NOEXCEPT { primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &) const = default; #else bool operator==(PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(primitiveTopologyListRestart == rhs.primitiveTopologyListRestart) && (primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart); # endif } bool operator!=(PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {}; VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT) == sizeof(VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; }; struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), primitivesGeneratedQuery(primitivesGeneratedQuery_), primitivesGeneratedQueryWithRasterizerDiscard(primitivesGeneratedQueryWithRasterizerDiscard_), primitivesGeneratedQueryWithNonZeroStreams(primitivesGeneratedQueryWithNonZeroStreams_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT(PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT(VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=(PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &operator=(VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQuery(VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_) VULKAN_HPP_NOEXCEPT { primitivesGeneratedQuery = primitivesGeneratedQuery_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithRasterizerDiscard(VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_) VULKAN_HPP_NOEXCEPT 
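  // Illustrative usage sketch, not part of the generated API: the generated constructors
  // take the feature members first and pNext last, even though sType and pNext come first
  // in memory (the layout still matches the C struct, as the static_asserts above check).
  // For PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT listRestart(
  //     VK_TRUE,    // primitiveTopologyListRestart_
  //     VK_FALSE ); // primitiveTopologyPatchListRestart_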
{ primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithNonZeroStreams(VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_) VULKAN_HPP_NOEXCEPT { primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &) const = default; #else bool operator==(PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (primitivesGeneratedQuery == rhs.primitivesGeneratedQuery) && (primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard) && (primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams); # endif } bool operator!=(PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {}; VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {}; VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT) == sizeof(VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; }; struct PhysicalDevicePrivateDataFeatures { using NativeType = VkPhysicalDevicePrivateDataFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), privateData(privateData_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures(PhysicalDevicePrivateDataFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrivateDataFeatures(VkPhysicalDevicePrivateDataFeatures const &rhs) VULKAN_HPP_NOEXCEPT : 
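  // Illustrative usage sketch, not part of the generated API: once
  // PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT above reports (and the device has
  // enabled) primitivesGeneratedQuery, a query pool of the corresponding type can be
  // created. `primitivesGeneratedFeatures` (queried via a getFeatures2 chain) and
  // `device` are assumed to exist.
  //
  //   if ( primitivesGeneratedFeatures.primitivesGeneratedQuery )
  //   {
  //     VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo poolInfo(
  //       {}, VULKAN_HPP_NAMESPACE::QueryType::ePrimitivesGeneratedEXT, 1 );
  //     VULKAN_HPP_NAMESPACE::QueryPool queryPool = device.createQueryPool( poolInfo );
  //   }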
PhysicalDevicePrivateDataFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePrivateDataFeatures &operator=(PhysicalDevicePrivateDataFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrivateDataFeatures &operator=(VkPhysicalDevicePrivateDataFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures &setPrivateData(VULKAN_HPP_NAMESPACE::Bool32 privateData_) VULKAN_HPP_NOEXCEPT { privateData = privateData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, privateData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePrivateDataFeatures const &) const = default; #else bool operator==(PhysicalDevicePrivateDataFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (privateData == rhs.privateData); # endif } bool operator!=(PhysicalDevicePrivateDataFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures) == sizeof(VkPhysicalDevicePrivateDataFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePrivateDataFeatures; }; using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; struct PhysicalDeviceSparseProperties { using NativeType = VkPhysicalDeviceSparseProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT : residencyStandard2DBlockShape(residencyStandard2DBlockShape_), residencyStandard2DMultisampleBlockShape(residencyStandard2DMultisampleBlockShape_), residencyStandard3DBlockShape(residencyStandard3DBlockShape_), residencyAlignedMipSize(residencyAlignedMipSize_), residencyNonResidentStrict(residencyNonResidentStrict_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(PhysicalDeviceSparseProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; 
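  // Illustrative usage sketch, not part of the generated API: every wrapper struct,
  // PhysicalDevicePrivateDataFeatures above included, shares its storage layout with the
  // corresponding Vk struct (see the sizeof/standard-layout static_asserts), so it can be
  // handed to C-style code through the explicit conversion operators.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures privateDataFeatures( VK_TRUE );
  //   const VkPhysicalDevicePrivateDataFeatures & native =
  //     static_cast<const VkPhysicalDevicePrivateDataFeatures &>( privateDataFeatures );
  //   // `native` aliases the same object and can be passed to a plain C entry point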
PhysicalDeviceSparseProperties(VkPhysicalDeviceSparseProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSparseProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSparseProperties &operator=(PhysicalDeviceSparseProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSparseProperties &operator=(VkPhysicalDeviceSparseProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(residencyStandard2DBlockShape, residencyStandard2DMultisampleBlockShape, residencyStandard3DBlockShape, residencyAlignedMipSize, residencyNonResidentStrict); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSparseProperties const &) const = default; #else bool operator==(PhysicalDeviceSparseProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape) && (residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape) && (residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape) && (residencyAlignedMipSize == rhs.residencyAlignedMipSize) && (residencyNonResidentStrict == rhs.residencyNonResidentStrict); # endif } bool operator!=(PhysicalDeviceSparseProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties) == sizeof(VkPhysicalDeviceSparseProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSparseProperties is not nothrow_move_constructible!"); struct PhysicalDeviceProperties { using NativeType = VkPhysicalDeviceProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array const &deviceName_ = {}, std::array const &pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT : apiVersion(apiVersion_), driverVersion(driverVersion_), vendorID(vendorID_), deviceID(deviceID_), deviceType(deviceType_), deviceName(deviceName_), pipelineCacheUUID(pipelineCacheUUID_), limits(limits_), sparseProperties(sparseProperties_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(PhysicalDeviceProperties const &rhs) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProperties(VkPhysicalDeviceProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceProperties &operator=(PhysicalDeviceProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProperties &operator=(VkPhysicalDeviceProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceProperties const &) const = default; #else bool operator==(PhysicalDeviceProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (apiVersion == rhs.apiVersion) && (driverVersion == rhs.driverVersion) && (vendorID == rhs.vendorID) && (deviceID == rhs.deviceID) && (deviceType == rhs.deviceType) && (deviceName == rhs.deviceName) && (pipelineCacheUUID == rhs.pipelineCacheUUID) && (limits == rhs.limits) && (sparseProperties == rhs.sparseProperties); # endif } bool operator!=(PhysicalDeviceProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t apiVersion = {}; uint32_t driverVersion = {}; uint32_t vendorID = {}; uint32_t deviceID = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties) == sizeof(VkPhysicalDeviceProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceProperties is not nothrow_move_constructible!"); struct PhysicalDeviceProperties2 { using NativeType = VkPhysicalDeviceProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), properties(properties_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(PhysicalDeviceProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProperties2(VkPhysicalDeviceProperties2 const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProperties2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ 
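  // Illustrative usage sketch, not part of the generated API: deviceName and
  // pipelineCacheUUID in PhysicalDeviceProperties above are exposed through
  // ArrayWrapper1D, which behaves like std::array. The `physicalDevice` handle is an
  // assumed input.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties props = physicalDevice.getProperties();
  //   std::string deviceName( props.deviceName.data() );
  //   uint32_t    apiMajor = VK_API_VERSION_MAJOR( props.apiVersion );
  //   uint32_t    apiMinor = VK_API_VERSION_MINOR( props.apiVersion );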
PhysicalDeviceProperties2 &operator=(PhysicalDeviceProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProperties2 &operator=(VkPhysicalDeviceProperties2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, properties); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceProperties2 const &) const = default; #else bool operator==(PhysicalDeviceProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (properties == rhs.properties); # endif } bool operator!=(PhysicalDeviceProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; void *pNext = {}; VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2) == sizeof(VkPhysicalDeviceProperties2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceProperties2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceProperties2; }; using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; struct PhysicalDeviceProtectedMemoryFeatures { using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), protectedMemory(protectedMemory_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(PhysicalDeviceProtectedMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryFeatures(VkPhysicalDeviceProtectedMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProtectedMemoryFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceProtectedMemoryFeatures &operator=(PhysicalDeviceProtectedMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryFeatures &operator=(VkPhysicalDeviceProtectedMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures &setProtectedMemory(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_) VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator 
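  // Illustrative usage sketch, not part of the generated API: instead of wiring pNext by
  // hand, the StructureChain helper can query PhysicalDeviceProperties2 above together
  // with any structure that extends it in a single call. The `physicalDevice` handle is
  // an assumed input.
  //
  //   auto chain = physicalDevice.getProperties2<
  //     VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //     VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>();
  //   VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>().protectedNoFault;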
VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, protectedMemory); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceProtectedMemoryFeatures const &) const = default; #else bool operator==(PhysicalDeviceProtectedMemoryFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (protectedMemory == rhs.protectedMemory); # endif } bool operator!=(PhysicalDeviceProtectedMemoryFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures) == sizeof(VkPhysicalDeviceProtectedMemoryFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceProtectedMemoryFeatures; }; struct PhysicalDeviceProtectedMemoryProperties { using NativeType = VkPhysicalDeviceProtectedMemoryProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), protectedNoFault(protectedNoFault_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(PhysicalDeviceProtectedMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryProperties(VkPhysicalDeviceProtectedMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProtectedMemoryProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceProtectedMemoryProperties &operator=(PhysicalDeviceProtectedMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryProperties &operator=(VkPhysicalDeviceProtectedMemoryProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, protectedNoFault); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceProtectedMemoryProperties const &) const = default; #else bool operator==(PhysicalDeviceProtectedMemoryProperties const &rhs) const 
VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (protectedNoFault == rhs.protectedNoFault); # endif } bool operator!=(PhysicalDeviceProtectedMemoryProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties) == sizeof(VkPhysicalDeviceProtectedMemoryProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceProtectedMemoryProperties; }; struct PhysicalDeviceProvokingVertexFeaturesEXT { using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), provokingVertexLast(provokingVertexLast_), transformFeedbackPreservesProvokingVertex(transformFeedbackPreservesProvokingVertex_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(PhysicalDeviceProvokingVertexFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProvokingVertexFeaturesEXT(VkPhysicalDeviceProvokingVertexFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProvokingVertexFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceProvokingVertexFeaturesEXT &operator=(PhysicalDeviceProvokingVertexFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProvokingVertexFeaturesEXT &operator=(VkPhysicalDeviceProvokingVertexFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setProvokingVertexLast(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_) VULKAN_HPP_NOEXCEPT { provokingVertexLast = provokingVertexLast_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex(VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_) VULKAN_HPP_NOEXCEPT { transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto 
# else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceProvokingVertexFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceProvokingVertexFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (provokingVertexLast == rhs.provokingVertexLast) && (transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex); # endif } bool operator!=(PhysicalDeviceProvokingVertexFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT) == sizeof(VkPhysicalDeviceProvokingVertexFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceProvokingVertexFeaturesEXT; }; struct PhysicalDeviceProvokingVertexPropertiesEXT { using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), provokingVertexModePerPipeline(provokingVertexModePerPipeline_), transformFeedbackPreservesTriangleFanProvokingVertex(transformFeedbackPreservesTriangleFanProvokingVertex_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(PhysicalDeviceProvokingVertexPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProvokingVertexPropertiesEXT(VkPhysicalDeviceProvokingVertexPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProvokingVertexPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceProvokingVertexPropertiesEXT &operator=(PhysicalDeviceProvokingVertexPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProvokingVertexPropertiesEXT &operator=(VkPhysicalDeviceProvokingVertexPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, 
provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceProvokingVertexPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceProvokingVertexPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline) && (transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex); # endif } bool operator!=(PhysicalDeviceProvokingVertexPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT) == sizeof(VkPhysicalDeviceProvokingVertexPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceProvokingVertexPropertiesEXT; }; struct PhysicalDevicePushDescriptorPropertiesKHR { using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxPushDescriptors(maxPushDescriptors_) { } VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(PhysicalDevicePushDescriptorPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePushDescriptorPropertiesKHR(VkPhysicalDevicePushDescriptorPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDevicePushDescriptorPropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDevicePushDescriptorPropertiesKHR &operator=(PhysicalDevicePushDescriptorPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePushDescriptorPropertiesKHR &operator=(VkPhysicalDevicePushDescriptorPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxPushDescriptors); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDevicePushDescriptorPropertiesKHR const &) const = default; #else bool operator==(PhysicalDevicePushDescriptorPropertiesKHR const &rhs) const 
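  // Illustrative usage sketch, not part of the generated API: when
  // PhysicalDeviceProvokingVertexFeaturesEXT above reports provokingVertexLast (and the
  // feature has been enabled at device creation), the per-pipeline mode is selected by
  // chaining a PipelineRasterizationProvokingVertexStateCreateInfoEXT into the
  // rasterization state. `provokingVertexFeatures` (queried via a getFeatures2 chain) is
  // an assumed input.
  //
  //   if ( provokingVertexFeatures.provokingVertexLast )
  //   {
  //     VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT provokingState;
  //     provokingState.setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eLastVertex );
  //     // chain &provokingState into PipelineRasterizationStateCreateInfo::pNext
  //   }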
VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxPushDescriptors == rhs.maxPushDescriptors); # endif } bool operator!=(PhysicalDevicePushDescriptorPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; void *pNext = {}; uint32_t maxPushDescriptors = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR) == sizeof(VkPhysicalDevicePushDescriptorPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDevicePushDescriptorPropertiesKHR; }; struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT { using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), formatRgba10x6WithoutYCbCrSampler(formatRgba10x6WithoutYCbCrSampler_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT(PhysicalDeviceRGBA10X6FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRGBA10X6FormatsFeaturesEXT(VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRGBA10X6FormatsFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRGBA10X6FormatsFeaturesEXT &operator=(PhysicalDeviceRGBA10X6FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRGBA10X6FormatsFeaturesEXT &operator=(VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler(VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_) VULKAN_HPP_NOEXCEPT { formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, formatRgba10x6WithoutYCbCrSampler); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRGBA10X6FormatsFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceRGBA10X6FormatsFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if 
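  // Illustrative usage sketch, not part of the generated API: before creating a
  // descriptor set layout with DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR,
  // its binding count can be checked against maxPushDescriptors from
  // PhysicalDevicePushDescriptorPropertiesKHR above. `pushDescriptorProperties`
  // (queried via a getProperties2 chain) and `bindingCount` are assumed inputs.
  //
  //   if ( bindingCount > pushDescriptorProperties.maxPushDescriptors )
  //   {
  //     // too many bindings for push descriptors; use a conventional descriptor set instead
  //   }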
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler); # endif } bool operator!=(PhysicalDeviceRGBA10X6FormatsFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT) == sizeof(VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; }; struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM { using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), rasterizationOrderColorAttachmentAccess(rasterizationOrderColorAttachmentAccess_), rasterizationOrderDepthAttachmentAccess(rasterizationOrderDepthAttachmentAccess_), rasterizationOrderStencilAttachmentAccess(rasterizationOrderStencilAttachmentAccess_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM( *reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & operator=(PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & operator=(VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setRasterizationOrderColorAttachmentAccess(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_) VULKAN_HPP_NOEXCEPT { rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setRasterizationOrderDepthAttachmentAccess(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_) VULKAN_HPP_NOEXCEPT { rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM & setRasterizationOrderStencilAttachmentAccess(VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_) VULKAN_HPP_NOEXCEPT { rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &) const = default; #else bool operator==(PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess) && (rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess) && (rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess); # endif } bool operator!=(PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM) == sizeof(VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; }; struct PhysicalDeviceRayQueryFeaturesKHR { using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : 
pNext(pNext_), rayQuery(rayQuery_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR(PhysicalDeviceRayQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayQueryFeaturesKHR(VkPhysicalDeviceRayQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayQueryFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRayQueryFeaturesKHR &operator=(PhysicalDeviceRayQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayQueryFeaturesKHR &operator=(VkPhysicalDeviceRayQueryFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR &setRayQuery(VULKAN_HPP_NAMESPACE::Bool32 rayQuery_) VULKAN_HPP_NOEXCEPT { rayQuery = rayQuery_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, rayQuery); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRayQueryFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceRayQueryFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (rayQuery == rhs.rayQuery); # endif } bool operator!=(PhysicalDeviceRayQueryFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR) == sizeof(VkPhysicalDeviceRayQueryFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRayQueryFeaturesKHR; }; struct PhysicalDeviceRayTracingMotionBlurFeaturesNV { using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), rayTracingMotionBlur(rayTracingMotionBlur_), rayTracingMotionBlurPipelineTraceRaysIndirect(rayTracingMotionBlurPipelineTraceRaysIndirect_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV(PhysicalDeviceRayTracingMotionBlurFeaturesNV const &rhs) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingMotionBlurFeaturesNV(VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingMotionBlurFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRayTracingMotionBlurFeaturesNV &operator=(PhysicalDeviceRayTracingMotionBlurFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingMotionBlurFeaturesNV &operator=(VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlur(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_) VULKAN_HPP_NOEXCEPT { rayTracingMotionBlur = rayTracingMotionBlur_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setRayTracingMotionBlurPipelineTraceRaysIndirect(VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_) VULKAN_HPP_NOEXCEPT { rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRayTracingMotionBlurFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceRayTracingMotionBlurFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (rayTracingMotionBlur == rhs.rayTracingMotionBlur) && (rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect); # endif } bool operator!=(PhysicalDeviceRayTracingMotionBlurFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV) == sizeof(VkPhysicalDeviceRayTracingMotionBlurFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV; }; struct PhysicalDeviceRayTracingPipelineFeaturesKHR { using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR; static const bool 
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), rayTracingPipeline(rayTracingPipeline_), rayTracingPipelineShaderGroupHandleCaptureReplay(rayTracingPipelineShaderGroupHandleCaptureReplay_), rayTracingPipelineShaderGroupHandleCaptureReplayMixed(rayTracingPipelineShaderGroupHandleCaptureReplayMixed_), rayTracingPipelineTraceRaysIndirect(rayTracingPipelineTraceRaysIndirect_), rayTraversalPrimitiveCulling(rayTraversalPrimitiveCulling_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(PhysicalDeviceRayTracingPipelineFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingPipelineFeaturesKHR(VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingPipelineFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRayTracingPipelineFeaturesKHR &operator=(PhysicalDeviceRayTracingPipelineFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingPipelineFeaturesKHR &operator=(VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipeline(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_) VULKAN_HPP_NOEXCEPT { rayTracingPipeline = rayTracingPipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_) VULKAN_HPP_NOEXCEPT { rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_) VULKAN_HPP_NOEXCEPT { rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_) VULKAN_HPP_NOEXCEPT { rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTraversalPrimitiveCulling(VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_) VULKAN_HPP_NOEXCEPT { rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator 
VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, rayTracingPipeline, rayTracingPipelineShaderGroupHandleCaptureReplay, rayTracingPipelineShaderGroupHandleCaptureReplayMixed, rayTracingPipelineTraceRaysIndirect, rayTraversalPrimitiveCulling); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRayTracingPipelineFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceRayTracingPipelineFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (rayTracingPipeline == rhs.rayTracingPipeline) && (rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay) && (rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed) && (rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect) && (rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling); # endif } bool operator!=(PhysicalDeviceRayTracingPipelineFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR) == sizeof(VkPhysicalDeviceRayTracingPipelineFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; }; struct PhysicalDeviceRayTracingPipelinePropertiesKHR { using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRayRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}, uint32_t maxRayDispatchInvocationCount_ = {}, uint32_t shaderGroupHandleAlignment_ = {}, uint32_t maxRayHitAttributeSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderGroupHandleSize(shaderGroupHandleSize_), maxRayRecursionDepth(maxRayRecursionDepth_), maxShaderGroupStride(maxShaderGroupStride_), 
shaderGroupBaseAlignment(shaderGroupBaseAlignment_), shaderGroupHandleCaptureReplaySize(shaderGroupHandleCaptureReplaySize_), maxRayDispatchInvocationCount(maxRayDispatchInvocationCount_), shaderGroupHandleAlignment(shaderGroupHandleAlignment_), maxRayHitAttributeSize(maxRayHitAttributeSize_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(PhysicalDeviceRayTracingPipelinePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingPipelinePropertiesKHR(VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingPipelinePropertiesKHR(*reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRayTracingPipelinePropertiesKHR &operator=(PhysicalDeviceRayTracingPipelinePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingPipelinePropertiesKHR &operator=(VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>(&rhs); return *this; } explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>(this); } explicit operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderGroupHandleSize, maxRayRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, shaderGroupHandleCaptureReplaySize, maxRayDispatchInvocationCount, shaderGroupHandleAlignment, maxRayHitAttributeSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRayTracingPipelinePropertiesKHR const &) const = default; #else bool operator==(PhysicalDeviceRayTracingPipelinePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderGroupHandleSize == rhs.shaderGroupHandleSize) && (maxRayRecursionDepth == rhs.maxRayRecursionDepth) && (maxShaderGroupStride == rhs.maxShaderGroupStride) && (shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment) && (shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize) && (maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount) && (shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment) && (maxRayHitAttributeSize == rhs.maxRayHitAttributeSize); # endif } bool operator!=(PhysicalDeviceRayTracingPipelinePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; void *pNext = {}; uint32_t shaderGroupHandleSize = {}; uint32_t maxRayRecursionDepth = {}; uint32_t maxShaderGroupStride = {}; uint32_t shaderGroupBaseAlignment = {}; uint32_t shaderGroupHandleCaptureReplaySize = {}; uint32_t maxRayDispatchInvocationCount = {}; uint32_t shaderGroupHandleAlignment = {}; uint32_t maxRayHitAttributeSize = {}; };
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR) == sizeof(VkPhysicalDeviceRayTracingPipelinePropertiesKHR), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "struct wrapper is not a standard layout!");
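// Usage note (editor's illustrative sketch, not part of the generated API): the limits reported in
// PhysicalDeviceRayTracingPipelinePropertiesKHR are typically used to lay out a shader binding table:
// each record holds a group handle rounded up to shaderGroupHandleAlignment, and each table region's
// base device address must be a multiple of shaderGroupBaseAlignment. A minimal sketch, assuming a
// filled properties struct named rtProps (the helper name alignUp is hypothetical):
//
//   constexpr uint32_t alignUp( uint32_t value, uint32_t alignment )
//   {
//     // valid because the reported alignments are powers of two
//     return ( value + alignment - 1 ) & ~( alignment - 1 );
//   }
//   uint32_t handleSizeAligned = alignUp( rtProps.shaderGroupHandleSize, rtProps.shaderGroupHandleAlignment );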
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; }; struct PhysicalDeviceRayTracingPropertiesNV { using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderGroupHandleSize(shaderGroupHandleSize_), maxRecursionDepth(maxRecursionDepth_), maxShaderGroupStride(maxShaderGroupStride_), shaderGroupBaseAlignment(shaderGroupBaseAlignment_), maxGeometryCount(maxGeometryCount_), maxInstanceCount(maxInstanceCount_), maxTriangleCount(maxTriangleCount_), maxDescriptorSetAccelerationStructures(maxDescriptorSetAccelerationStructures_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(PhysicalDeviceRayTracingPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingPropertiesNV(VkPhysicalDeviceRayTracingPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRayTracingPropertiesNV &operator=(PhysicalDeviceRayTracingPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingPropertiesNV &operator=(VkPhysicalDeviceRayTracingPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderGroupHandleSize, maxRecursionDepth, maxShaderGroupStride, shaderGroupBaseAlignment, maxGeometryCount, maxInstanceCount, maxTriangleCount, maxDescriptorSetAccelerationStructures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRayTracingPropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceRayTracingPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderGroupHandleSize == rhs.shaderGroupHandleSize) && (maxRecursionDepth == rhs.maxRecursionDepth) && (maxShaderGroupStride == rhs.maxShaderGroupStride) && (shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment) && (maxGeometryCount == rhs.maxGeometryCount) && (maxInstanceCount == rhs.maxInstanceCount) && (maxTriangleCount == rhs.maxTriangleCount) && (maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures); # endif } bool operator!=(PhysicalDeviceRayTracingPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; void *pNext = {}; uint32_t shaderGroupHandleSize = {}; uint32_t maxRecursionDepth = {}; uint32_t maxShaderGroupStride = {}; uint32_t shaderGroupBaseAlignment = {}; uint64_t maxGeometryCount = {}; uint64_t maxInstanceCount = {}; uint64_t maxTriangleCount = {}; uint32_t maxDescriptorSetAccelerationStructures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV) == sizeof(VkPhysicalDeviceRayTracingPropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRayTracingPropertiesNV; }; struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV { using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), representativeFragmentTest(representativeFragmentTest_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(PhysicalDeviceRepresentativeFragmentTestFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRepresentativeFragmentTestFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=(PhysicalDeviceRepresentativeFragmentTestFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRepresentativeFragmentTestFeaturesNV &operator=(VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_) VULKAN_HPP_NOEXCEPT { representativeFragmentTest = representativeFragmentTest_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, representativeFragmentTest); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRepresentativeFragmentTestFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceRepresentativeFragmentTestFeaturesNV 
const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (representativeFragmentTest == rhs.representativeFragmentTest); # endif } bool operator!=(PhysicalDeviceRepresentativeFragmentTestFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV) == sizeof(VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; }; struct PhysicalDeviceRobustness2FeaturesEXT { using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), robustBufferAccess2(robustBufferAccess2_), robustImageAccess2(robustImageAccess2_), nullDescriptor(nullDescriptor_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(PhysicalDeviceRobustness2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRobustness2FeaturesEXT(VkPhysicalDeviceRobustness2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRobustness2FeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRobustness2FeaturesEXT &operator=(PhysicalDeviceRobustness2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRobustness2FeaturesEXT &operator=(VkPhysicalDeviceRobustness2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_) VULKAN_HPP_NOEXCEPT { robustBufferAccess2 = robustBufferAccess2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &setRobustImageAccess2(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_) VULKAN_HPP_NOEXCEPT { robustImageAccess2 = robustImageAccess2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &setNullDescriptor(VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_) VULKAN_HPP_NOEXCEPT { nullDescriptor = nullDescriptor_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator 
VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRobustness2FeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceRobustness2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (robustBufferAccess2 == rhs.robustBufferAccess2) && (robustImageAccess2 == rhs.robustImageAccess2) && (nullDescriptor == rhs.nullDescriptor); # endif } bool operator!=(PhysicalDeviceRobustness2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT) == sizeof(VkPhysicalDeviceRobustness2FeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRobustness2FeaturesEXT; }; struct PhysicalDeviceRobustness2PropertiesEXT { using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), robustStorageBufferAccessSizeAlignment(robustStorageBufferAccessSizeAlignment_), robustUniformBufferAccessSizeAlignment(robustUniformBufferAccessSizeAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(PhysicalDeviceRobustness2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRobustness2PropertiesEXT(VkPhysicalDeviceRobustness2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRobustness2PropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceRobustness2PropertiesEXT &operator=(PhysicalDeviceRobustness2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRobustness2PropertiesEXT &operator=(VkPhysicalDeviceRobustness2PropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceRobustness2PropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceRobustness2PropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment) && (robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment); # endif } bool operator!=(PhysicalDeviceRobustness2PropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT) == sizeof(VkPhysicalDeviceRobustness2PropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceRobustness2PropertiesEXT; }; struct PhysicalDeviceSampleLocationsPropertiesEXT { using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array const &sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), sampleLocationSampleCounts(sampleLocationSampleCounts_), maxSampleLocationGridSize(maxSampleLocationGridSize_), sampleLocationCoordinateRange(sampleLocationCoordinateRange_), sampleLocationSubPixelBits(sampleLocationSubPixelBits_), variableSampleLocations(variableSampleLocations_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(PhysicalDeviceSampleLocationsPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSampleLocationsPropertiesEXT(VkPhysicalDeviceSampleLocationsPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSampleLocationsPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSampleLocationsPropertiesEXT &operator=(PhysicalDeviceSampleLocationsPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSampleLocationsPropertiesEXT &operator=(VkPhysicalDeviceSampleLocationsPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>(this); } explicit operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, sampleLocationSampleCounts, maxSampleLocationGridSize, sampleLocationCoordinateRange, sampleLocationSubPixelBits, variableSampleLocations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSampleLocationsPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceSampleLocationsPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (sampleLocationSampleCounts == rhs.sampleLocationSampleCounts) && (maxSampleLocationGridSize == rhs.maxSampleLocationGridSize) && (sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange) && (sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits) && (variableSampleLocations == rhs.variableSampleLocations); # endif } bool operator!=(PhysicalDeviceSampleLocationsPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {}; uint32_t sampleLocationSubPixelBits = {}; VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; };
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT) == sizeof(VkPhysicalDeviceSampleLocationsPropertiesEXT), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!");
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!");
template <> struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT> { using Type = PhysicalDeviceSampleLocationsPropertiesEXT; };
struct PhysicalDeviceSamplerFilterMinmaxProperties { using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), filterMinmaxSingleComponentFormats(filterMinmaxSingleComponentFormats_), filterMinmaxImageComponentMapping(filterMinmaxImageComponentMapping_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(PhysicalDeviceSamplerFilterMinmaxProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSamplerFilterMinmaxProperties(VkPhysicalDeviceSamplerFilterMinmaxProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSamplerFilterMinmaxProperties(*reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSamplerFilterMinmaxProperties &operator=(PhysicalDeviceSamplerFilterMinmaxProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSamplerFilterMinmaxProperties &operator=(VkPhysicalDeviceSamplerFilterMinmaxProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>(&rhs); return *this; } explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>(this); } explicit operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSamplerFilterMinmaxProperties const &) const = default; #else bool operator==(PhysicalDeviceSamplerFilterMinmaxProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats) && (filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping); # endif } bool operator!=(PhysicalDeviceSamplerFilterMinmaxProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; };
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties) == sizeof(VkPhysicalDeviceSamplerFilterMinmaxProperties), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!");
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!");
template <> struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties> { using Type = PhysicalDeviceSamplerFilterMinmaxProperties; };
using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
struct PhysicalDeviceSamplerYcbcrConversionFeatures { using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), samplerYcbcrConversion(samplerYcbcrConversion_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(PhysicalDeviceSamplerYcbcrConversionFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSamplerYcbcrConversionFeatures(VkPhysicalDeviceSamplerYcbcrConversionFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSamplerYcbcrConversionFeatures(*reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSamplerYcbcrConversionFeatures &operator=(PhysicalDeviceSamplerYcbcrConversionFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default;
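// Usage note (editor's illustrative sketch, not part of the generated API): like the other feature
// structs in this header, PhysicalDeviceSamplerYcbcrConversionFeatures is normally queried by chaining
// it into PhysicalDeviceFeatures2, and the filled struct is then passed through DeviceCreateInfo::pNext
// to enable the feature at device creation. A minimal sketch, assuming a
// VULKAN_HPP_NAMESPACE::PhysicalDevice handle named physicalDevice:
//
//   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
//                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>();
//   bool ycbcrSupported =
//     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>().samplerYcbcrConversion;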
PhysicalDeviceSamplerYcbcrConversionFeatures &operator=(VkPhysicalDeviceSamplerYcbcrConversionFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_) VULKAN_HPP_NOEXCEPT { samplerYcbcrConversion = samplerYcbcrConversion_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, samplerYcbcrConversion); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSamplerYcbcrConversionFeatures const &) const = default; #else bool operator==(PhysicalDeviceSamplerYcbcrConversionFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (samplerYcbcrConversion == rhs.samplerYcbcrConversion); # endif } bool operator!=(PhysicalDeviceSamplerYcbcrConversionFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures) == sizeof(VkPhysicalDeviceSamplerYcbcrConversionFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; }; using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; struct PhysicalDeviceScalarBlockLayoutFeatures { using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), scalarBlockLayout(scalarBlockLayout_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(PhysicalDeviceScalarBlockLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceScalarBlockLayoutFeatures(VkPhysicalDeviceScalarBlockLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceScalarBlockLayoutFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceScalarBlockLayoutFeatures &operator=(PhysicalDeviceScalarBlockLayoutFeatures 
const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceScalarBlockLayoutFeatures &operator=(VkPhysicalDeviceScalarBlockLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_) VULKAN_HPP_NOEXCEPT { scalarBlockLayout = scalarBlockLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, scalarBlockLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceScalarBlockLayoutFeatures const &) const = default; #else bool operator==(PhysicalDeviceScalarBlockLayoutFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (scalarBlockLayout == rhs.scalarBlockLayout); # endif } bool operator!=(PhysicalDeviceScalarBlockLayoutFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures) == sizeof(VkPhysicalDeviceScalarBlockLayoutFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceScalarBlockLayoutFeatures; }; using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures { using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), separateDepthStencilLayouts(separateDepthStencilLayouts_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(PhysicalDeviceSeparateDepthStencilLayoutsFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSeparateDepthStencilLayoutsFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSeparateDepthStencilLayoutsFeatures 
& operator=(PhysicalDeviceSeparateDepthStencilLayoutsFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSeparateDepthStencilLayoutsFeatures &operator=(VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_) VULKAN_HPP_NOEXCEPT { separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, separateDepthStencilLayouts); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSeparateDepthStencilLayoutsFeatures const &) const = default; #else bool operator==(PhysicalDeviceSeparateDepthStencilLayoutsFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (separateDepthStencilLayouts == rhs.separateDepthStencilLayouts); # endif } bool operator!=(PhysicalDeviceSeparateDepthStencilLayoutsFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures) == sizeof(VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; }; using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT { using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
shaderSharedFloat16Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderBufferFloat16Atomics(shaderBufferFloat16Atomics_), shaderBufferFloat16AtomicAdd(shaderBufferFloat16AtomicAdd_), shaderBufferFloat16AtomicMinMax(shaderBufferFloat16AtomicMinMax_), shaderBufferFloat32AtomicMinMax(shaderBufferFloat32AtomicMinMax_), shaderBufferFloat64AtomicMinMax(shaderBufferFloat64AtomicMinMax_), shaderSharedFloat16Atomics(shaderSharedFloat16Atomics_), shaderSharedFloat16AtomicAdd(shaderSharedFloat16AtomicAdd_), shaderSharedFloat16AtomicMinMax(shaderSharedFloat16AtomicMinMax_), shaderSharedFloat32AtomicMinMax(shaderSharedFloat32AtomicMinMax_), shaderSharedFloat64AtomicMinMax(shaderSharedFloat64AtomicMinMax_), shaderImageFloat32AtomicMinMax(shaderImageFloat32AtomicMinMax_), sparseImageFloat32AtomicMinMax(sparseImageFloat32AtomicMinMax_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT(PhysicalDeviceShaderAtomicFloat2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderAtomicFloat2FeaturesEXT(VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderAtomicFloat2FeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderAtomicFloat2FeaturesEXT &operator=(PhysicalDeviceShaderAtomicFloat2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderAtomicFloat2FeaturesEXT &operator=(VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat16AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat32AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderBufferFloat64AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat16AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat32AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderSharedFloat64AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setShaderImageFloat32AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_) VULKAN_HPP_NOEXCEPT { shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setSparseImageFloat32AtomicMinMax(VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_) VULKAN_HPP_NOEXCEPT { sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderBufferFloat16Atomics, shaderBufferFloat16AtomicAdd, shaderBufferFloat16AtomicMinMax, shaderBufferFloat32AtomicMinMax, shaderBufferFloat64AtomicMinMax, shaderSharedFloat16Atomics, shaderSharedFloat16AtomicAdd, shaderSharedFloat16AtomicMinMax, shaderSharedFloat32AtomicMinMax, shaderSharedFloat64AtomicMinMax, shaderImageFloat32AtomicMinMax, sparseImageFloat32AtomicMinMax); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderAtomicFloat2FeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceShaderAtomicFloat2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics) && (shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd) && (shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax) && (shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax) && (shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax) && (shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics) && (shaderSharedFloat16AtomicAdd == 
rhs.shaderSharedFloat16AtomicAdd) && (shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax) && (shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax) && (shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax) && (shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax) && (sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax); # endif } bool operator!=(PhysicalDeviceShaderAtomicFloat2FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT) == sizeof(VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT; }; struct PhysicalDeviceShaderAtomicFloatFeaturesEXT { using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderBufferFloat32Atomics(shaderBufferFloat32Atomics_), shaderBufferFloat32AtomicAdd(shaderBufferFloat32AtomicAdd_), shaderBufferFloat64Atomics(shaderBufferFloat64Atomics_), shaderBufferFloat64AtomicAdd(shaderBufferFloat64AtomicAdd_), shaderSharedFloat32Atomics(shaderSharedFloat32Atomics_), 
shaderSharedFloat32AtomicAdd(shaderSharedFloat32AtomicAdd_), shaderSharedFloat64Atomics(shaderSharedFloat64Atomics_), shaderSharedFloat64AtomicAdd(shaderSharedFloat64AtomicAdd_), shaderImageFloat32Atomics(shaderImageFloat32Atomics_), shaderImageFloat32AtomicAdd(shaderImageFloat32AtomicAdd_), sparseImageFloat32Atomics(sparseImageFloat32Atomics_), sparseImageFloat32AtomicAdd(sparseImageFloat32AtomicAdd_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(PhysicalDeviceShaderAtomicFloatFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderAtomicFloatFeaturesEXT(VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderAtomicFloatFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderAtomicFloatFeaturesEXT &operator=(PhysicalDeviceShaderAtomicFloatFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderAtomicFloatFeaturesEXT &operator=(VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_) VULKAN_HPP_NOEXCEPT { 
shaderImageFloat32Atomics = shaderImageFloat32Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_) VULKAN_HPP_NOEXCEPT { shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics(VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_) VULKAN_HPP_NOEXCEPT { sparseImageFloat32Atomics = sparseImageFloat32Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd(VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_) VULKAN_HPP_NOEXCEPT { sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderBufferFloat32Atomics, shaderBufferFloat32AtomicAdd, shaderBufferFloat64Atomics, shaderBufferFloat64AtomicAdd, shaderSharedFloat32Atomics, shaderSharedFloat32AtomicAdd, shaderSharedFloat64Atomics, shaderSharedFloat64AtomicAdd, shaderImageFloat32Atomics, shaderImageFloat32AtomicAdd, sparseImageFloat32Atomics, sparseImageFloat32AtomicAdd); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderAtomicFloatFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceShaderAtomicFloatFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics) && (shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd) && (shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics) && (shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd) && (shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics) && (shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd) && (shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics) && (shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd) && (shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics) && (shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd) && (sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics) && (sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd); # endif } bool operator!=(PhysicalDeviceShaderAtomicFloatFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; 
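    // Usage sketch (illustrative only, not part of the Vulkan API): each Bool32 member of this
    // struct reports one VK_EXT_shader_atomic_float capability. A typical query chains the struct
    // into PhysicalDeviceFeatures2 via pNext, assuming the default vk namespace and a valid
    // vk::PhysicalDevice named `physicalDevice`:
    //
    //   vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT atomicFloatFeatures;
    //   vk::PhysicalDeviceFeatures2                    features2;
    //   features2.pNext = &atomicFloatFeatures;
    //   physicalDevice.getFeatures2( &features2 );
    //   if ( atomicFloatFeatures.shaderBufferFloat32AtomicAdd )
    //   {
    //     // the same struct can then be chained into vk::DeviceCreateInfo::pNext to enable the feature
    //   }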
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT) == sizeof(VkPhysicalDeviceShaderAtomicFloatFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; }; struct PhysicalDeviceShaderAtomicInt64Features { using NativeType = VkPhysicalDeviceShaderAtomicInt64Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderBufferInt64Atomics(shaderBufferInt64Atomics_), shaderSharedInt64Atomics(shaderSharedInt64Atomics_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(PhysicalDeviceShaderAtomicInt64Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderAtomicInt64Features(VkPhysicalDeviceShaderAtomicInt64Features const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderAtomicInt64Features(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderAtomicInt64Features &operator=(PhysicalDeviceShaderAtomicInt64Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderAtomicInt64Features &operator=(VkPhysicalDeviceShaderAtomicInt64Features const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_) VULKAN_HPP_NOEXCEPT { shaderBufferInt64Atomics = shaderBufferInt64Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_) VULKAN_HPP_NOEXCEPT { shaderSharedInt64Atomics = shaderSharedInt64Atomics_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderAtomicInt64Features const &) const = default; #else bool 
operator==(PhysicalDeviceShaderAtomicInt64Features const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics) && (shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics); # endif } bool operator!=(PhysicalDeviceShaderAtomicInt64Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features) == sizeof(VkPhysicalDeviceShaderAtomicInt64Features), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderAtomicInt64Features; }; using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; struct PhysicalDeviceShaderClockFeaturesKHR { using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderSubgroupClock(shaderSubgroupClock_), shaderDeviceClock(shaderDeviceClock_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(PhysicalDeviceShaderClockFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderClockFeaturesKHR(VkPhysicalDeviceShaderClockFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderClockFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderClockFeaturesKHR &operator=(PhysicalDeviceShaderClockFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderClockFeaturesKHR &operator=(VkPhysicalDeviceShaderClockFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_) VULKAN_HPP_NOEXCEPT { shaderSubgroupClock = shaderSubgroupClock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &setShaderDeviceClock(VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_) VULKAN_HPP_NOEXCEPT { shaderDeviceClock = shaderDeviceClock_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderSubgroupClock, shaderDeviceClock); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderClockFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceShaderClockFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderSubgroupClock == rhs.shaderSubgroupClock) && (shaderDeviceClock == rhs.shaderDeviceClock); # endif } bool operator!=(PhysicalDeviceShaderClockFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR) == sizeof(VkPhysicalDeviceShaderClockFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderClockFeaturesKHR; }; struct PhysicalDeviceShaderCoreProperties2AMD { using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderCoreFeatures(shaderCoreFeatures_), activeComputeUnitCount(activeComputeUnitCount_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(PhysicalDeviceShaderCoreProperties2AMD const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderCoreProperties2AMD(VkPhysicalDeviceShaderCoreProperties2AMD const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderCoreProperties2AMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderCoreProperties2AMD &operator=(PhysicalDeviceShaderCoreProperties2AMD const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderCoreProperties2AMD &operator=(VkPhysicalDeviceShaderCoreProperties2AMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderCoreFeatures, activeComputeUnitCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderCoreProperties2AMD const &) const = default; #else bool 
operator==(PhysicalDeviceShaderCoreProperties2AMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderCoreFeatures == rhs.shaderCoreFeatures) && (activeComputeUnitCount == rhs.activeComputeUnitCount); # endif } bool operator!=(PhysicalDeviceShaderCoreProperties2AMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; void *pNext = {}; VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; uint32_t activeComputeUnitCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD) == sizeof(VkPhysicalDeviceShaderCoreProperties2AMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderCoreProperties2AMD; }; struct PhysicalDeviceShaderCorePropertiesAMD { using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderEngineCount(shaderEngineCount_), shaderArraysPerEngineCount(shaderArraysPerEngineCount_), computeUnitsPerShaderArray(computeUnitsPerShaderArray_), simdPerComputeUnit(simdPerComputeUnit_), wavefrontsPerSimd(wavefrontsPerSimd_), wavefrontSize(wavefrontSize_), sgprsPerSimd(sgprsPerSimd_), minSgprAllocation(minSgprAllocation_), maxSgprAllocation(maxSgprAllocation_), sgprAllocationGranularity(sgprAllocationGranularity_), vgprsPerSimd(vgprsPerSimd_), minVgprAllocation(minVgprAllocation_), maxVgprAllocation(maxVgprAllocation_), vgprAllocationGranularity(vgprAllocationGranularity_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(PhysicalDeviceShaderCorePropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderCorePropertiesAMD(VkPhysicalDeviceShaderCorePropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderCorePropertiesAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderCorePropertiesAMD &operator=(PhysicalDeviceShaderCorePropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderCorePropertiesAMD &operator=(VkPhysicalDeviceShaderCorePropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderEngineCount, shaderArraysPerEngineCount, computeUnitsPerShaderArray, simdPerComputeUnit, wavefrontsPerSimd, wavefrontSize, sgprsPerSimd, minSgprAllocation, maxSgprAllocation, sgprAllocationGranularity, vgprsPerSimd, minVgprAllocation, maxVgprAllocation, vgprAllocationGranularity); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderCorePropertiesAMD const &) const = default; #else bool operator==(PhysicalDeviceShaderCorePropertiesAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderEngineCount == rhs.shaderEngineCount) && (shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount) && (computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray) && (simdPerComputeUnit == rhs.simdPerComputeUnit) && (wavefrontsPerSimd == rhs.wavefrontsPerSimd) && (wavefrontSize == rhs.wavefrontSize) && (sgprsPerSimd == rhs.sgprsPerSimd) && (minSgprAllocation == rhs.minSgprAllocation) && (maxSgprAllocation == rhs.maxSgprAllocation) && (sgprAllocationGranularity == rhs.sgprAllocationGranularity) && (vgprsPerSimd == rhs.vgprsPerSimd) && (minVgprAllocation == rhs.minVgprAllocation) && (maxVgprAllocation == rhs.maxVgprAllocation) && (vgprAllocationGranularity == rhs.vgprAllocationGranularity); # endif } bool operator!=(PhysicalDeviceShaderCorePropertiesAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; void *pNext = {}; uint32_t shaderEngineCount = {}; uint32_t shaderArraysPerEngineCount = {}; uint32_t computeUnitsPerShaderArray = {}; uint32_t simdPerComputeUnit = {}; uint32_t wavefrontsPerSimd = {}; uint32_t wavefrontSize = {}; uint32_t sgprsPerSimd = {}; uint32_t minSgprAllocation = {}; uint32_t maxSgprAllocation = {}; uint32_t sgprAllocationGranularity = {}; uint32_t vgprsPerSimd = {}; uint32_t minVgprAllocation = {}; uint32_t maxVgprAllocation = {}; uint32_t vgprAllocationGranularity = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD) == sizeof(VkPhysicalDeviceShaderCorePropertiesAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderCorePropertiesAMD; }; struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures { using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
shaderDemoteToHelperInvocation(shaderDemoteToHelperInvocation_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures(PhysicalDeviceShaderDemoteToHelperInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderDemoteToHelperInvocationFeatures(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderDemoteToHelperInvocationFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=(PhysicalDeviceShaderDemoteToHelperInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderDemoteToHelperInvocationFeatures &operator=(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setShaderDemoteToHelperInvocation(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_) VULKAN_HPP_NOEXCEPT { shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderDemoteToHelperInvocation); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderDemoteToHelperInvocationFeatures const &) const = default; #else bool operator==(PhysicalDeviceShaderDemoteToHelperInvocationFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation); # endif } bool operator!=(PhysicalDeviceShaderDemoteToHelperInvocationFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures) == sizeof(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; }; using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; struct PhysicalDeviceShaderDrawParametersFeatures { using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures; static const bool allowDuplicate 
    = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , shaderDrawParameters(shaderDrawParameters_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(PhysicalDeviceShaderDrawParametersFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures(VkPhysicalDeviceShaderDrawParametersFeatures const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDrawParametersFeatures(*reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderDrawParametersFeatures &operator=(PhysicalDeviceShaderDrawParametersFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures &operator=(VkPhysicalDeviceShaderDrawParametersFeatures const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures &setShaderDrawParameters(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_) VULKAN_HPP_NOEXCEPT
    {
      shaderDrawParameters = shaderDrawParameters_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(this);
    }

    explicit operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, shaderDrawParameters);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceShaderDrawParametersFeatures const &) const = default;
#else
    bool operator==(PhysicalDeviceShaderDrawParametersFeatures const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderDrawParameters == rhs.shaderDrawParameters);
# endif
    }

    bool operator!=(PhysicalDeviceShaderDrawParametersFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
    void                               *pNext                = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderDrawParameters = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures) == sizeof(VkPhysicalDeviceShaderDrawParametersFeatures),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
                           "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
  {
    using Type = PhysicalDeviceShaderDrawParametersFeatures;
  };

  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;

  struct PhysicalDeviceShaderFloat16Int8Features
  {
    using NativeType = VkPhysicalDeviceShaderFloat16Int8Features;

    static
const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderFloat16(shaderFloat16_), shaderInt8(shaderInt8_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(PhysicalDeviceShaderFloat16Int8Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderFloat16Int8Features(VkPhysicalDeviceShaderFloat16Int8Features const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderFloat16Int8Features(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderFloat16Int8Features &operator=(PhysicalDeviceShaderFloat16Int8Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderFloat16Int8Features &operator=(VkPhysicalDeviceShaderFloat16Int8Features const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features &setShaderFloat16(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_) VULKAN_HPP_NOEXCEPT { shaderFloat16 = shaderFloat16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features &setShaderInt8(VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_) VULKAN_HPP_NOEXCEPT { shaderInt8 = shaderInt8_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderFloat16, shaderInt8); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderFloat16Int8Features const &) const = default; #else bool operator==(PhysicalDeviceShaderFloat16Int8Features const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderFloat16 == rhs.shaderFloat16) && (shaderInt8 == rhs.shaderInt8); # endif } bool operator!=(PhysicalDeviceShaderFloat16Int8Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features) == sizeof(VkPhysicalDeviceShaderFloat16Int8Features), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!"); template<> struct CppType { using Type = 
PhysicalDeviceShaderFloat16Int8Features; }; using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT { using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderImageInt64Atomics(shaderImageInt64Atomics_), sparseImageInt64Atomics(sparseImageInt64Atomics_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &operator=(PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &operator=(VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_) VULKAN_HPP_NOEXCEPT { shaderImageInt64Atomics = shaderImageInt64Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics(VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_) VULKAN_HPP_NOEXCEPT { sparseImageInt64Atomics = sparseImageInt64Atomics_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderImageInt64Atomics == rhs.shaderImageInt64Atomics) && (sparseImageInt64Atomics == rhs.sparseImageInt64Atomics); # endif } bool operator!=(PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } 
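    // Usage sketch (illustrative only, assuming the default vk namespace and a valid
    // vk::PhysicalDevice named `physicalDevice`): feature structs such as this one are
    // commonly queried through a StructureChain, which links the pNext members automatically:
    //
    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
    //                                            vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>();
    //   vk::Bool32 supported =
    //     chain.get<vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>().shaderImageInt64Atomics;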
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
    void                               *pNext                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderImageInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32        sparseImageInt64Atomics = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT) == sizeof(VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
                           "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  };

  struct PhysicalDeviceShaderImageFootprintFeaturesNV
  {
    using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , imageFootprint(imageFootprint_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(PhysicalDeviceShaderImageFootprintFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageFootprintFeaturesNV(VkPhysicalDeviceShaderImageFootprintFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageFootprintFeaturesNV(*reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderImageFootprintFeaturesNV &operator=(PhysicalDeviceShaderImageFootprintFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageFootprintFeaturesNV &operator=(VkPhysicalDeviceShaderImageFootprintFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV &setImageFootprint(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_) VULKAN_HPP_NOEXCEPT
    {
      imageFootprint = imageFootprint_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(this);
    }

    explicit operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, imageFootprint);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceShaderImageFootprintFeaturesNV const &) const = default;
#else
    bool operator==(PhysicalDeviceShaderImageFootprintFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageFootprint == rhs.imageFootprint);
# endif
    }

    bool operator!=(PhysicalDeviceShaderImageFootprintFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV) == sizeof(VkPhysicalDeviceShaderImageFootprintFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; }; struct PhysicalDeviceShaderIntegerDotProductFeatures { using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderIntegerDotProduct(shaderIntegerDotProduct_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures(PhysicalDeviceShaderIntegerDotProductFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderIntegerDotProductFeatures(VkPhysicalDeviceShaderIntegerDotProductFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderIntegerDotProductFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderIntegerDotProductFeatures &operator=(PhysicalDeviceShaderIntegerDotProductFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderIntegerDotProductFeatures &operator=(VkPhysicalDeviceShaderIntegerDotProductFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setShaderIntegerDotProduct(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_) VULKAN_HPP_NOEXCEPT { shaderIntegerDotProduct = shaderIntegerDotProduct_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderIntegerDotProduct); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderIntegerDotProductFeatures const &) const = default; #else bool operator==(PhysicalDeviceShaderIntegerDotProductFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderIntegerDotProduct == rhs.shaderIntegerDotProduct); # endif } bool operator!=(PhysicalDeviceShaderIntegerDotProductFeatures const &rhs) 
const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures) == sizeof(VkPhysicalDeviceShaderIntegerDotProductFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderIntegerDotProductFeatures; }; using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; struct PhysicalDeviceShaderIntegerDotProductProperties { using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), integerDotProduct8BitUnsignedAccelerated(integerDotProduct8BitUnsignedAccelerated_), integerDotProduct8BitSignedAccelerated(integerDotProduct8BitSignedAccelerated_), integerDotProduct8BitMixedSignednessAccelerated(integerDotProduct8BitMixedSignednessAccelerated_), integerDotProduct4x8BitPackedUnsignedAccelerated(integerDotProduct4x8BitPackedUnsignedAccelerated_), integerDotProduct4x8BitPackedSignedAccelerated(integerDotProduct4x8BitPackedSignedAccelerated_), integerDotProduct4x8BitPackedMixedSignednessAccelerated(integerDotProduct4x8BitPackedMixedSignednessAccelerated_), integerDotProduct16BitUnsignedAccelerated(integerDotProduct16BitUnsignedAccelerated_), integerDotProduct16BitSignedAccelerated(integerDotProduct16BitSignedAccelerated_), integerDotProduct16BitMixedSignednessAccelerated(integerDotProduct16BitMixedSignednessAccelerated_), integerDotProduct32BitUnsignedAccelerated(integerDotProduct32BitUnsignedAccelerated_), integerDotProduct32BitSignedAccelerated(integerDotProduct32BitSignedAccelerated_), integerDotProduct32BitMixedSignednessAccelerated(integerDotProduct32BitMixedSignednessAccelerated_), integerDotProduct64BitUnsignedAccelerated(integerDotProduct64BitUnsignedAccelerated_), integerDotProduct64BitSignedAccelerated(integerDotProduct64BitSignedAccelerated_), integerDotProduct64BitMixedSignednessAccelerated(integerDotProduct64BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating8BitUnsignedAccelerated(integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating8BitSignedAccelerated(integerDotProductAccumulatingSaturating8BitSignedAccelerated_), integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated(integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_), integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated(integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_), integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating16BitUnsignedAccelerated(integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating16BitSignedAccelerated(integerDotProductAccumulatingSaturating16BitSignedAccelerated_), integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating32BitUnsignedAccelerated(integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating32BitSignedAccelerated(integerDotProductAccumulatingSaturating32BitSignedAccelerated_), 
integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating64BitUnsignedAccelerated(integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating64BitSignedAccelerated(integerDotProductAccumulatingSaturating64BitSignedAccelerated_), integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(PhysicalDeviceShaderIntegerDotProductProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderIntegerDotProductProperties(VkPhysicalDeviceShaderIntegerDotProductProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderIntegerDotProductProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderIntegerDotProductProperties &operator=(PhysicalDeviceShaderIntegerDotProductProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderIntegerDotProductProperties &operator=(VkPhysicalDeviceShaderIntegerDotProductProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(PhysicalDeviceShaderIntegerDotProductProperties const &) const = default; #else bool operator==(PhysicalDeviceShaderIntegerDotProductProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated) && (integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated) && (integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated) && (integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated) && (integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated) && (integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated) && (integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated) && (integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated) && (integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated) && (integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated) && (integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated) && (integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated) && (integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated) && (integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated) && (integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated) && (integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated) && (integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated) && (integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated) && (integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated) && (integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == 
rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated) && (integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated); # endif } bool operator!=(PhysicalDeviceShaderIntegerDotProductProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; }; 
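  // NOTE: illustrative usage sketch, not part of the generated header.
  // PhysicalDeviceShaderIntegerDotProductProperties above is an output structure that is
  // normally filled in by chaining it into a PhysicalDeviceProperties2 query. A minimal
  // sketch, assuming a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named `physicalDevice`
  // and that the StructureChain overload of getProperties2 is available, might look like:
  //
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>();
  //   auto const & dotProductProperties =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>();
  //   if ( dotProductProperties.integerDotProduct8BitUnsignedAccelerated )
  //   {
  //     // 8-bit unsigned integer dot products are accelerated on this device.
  //   }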
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
                            "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
  {
    using Type = PhysicalDeviceShaderIntegerDotProductProperties;
  };

  using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;

  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
  {
    using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {},
                                                                             void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , shaderIntegerFunctions2( shaderIntegerFunctions2_ )
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
      setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

    explicit operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, shaderIntegerFunctions2 );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
# endif
    }

    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const
&rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL) == sizeof(VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; }; struct PhysicalDeviceShaderSMBuiltinsFeaturesNV { using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderSMBuiltins(shaderSMBuiltins_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(PhysicalDeviceShaderSMBuiltinsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSMBuiltinsFeaturesNV(VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderSMBuiltinsFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderSMBuiltinsFeaturesNV &operator=(PhysicalDeviceShaderSMBuiltinsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSMBuiltinsFeaturesNV &operator=(VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV &setShaderSMBuiltins(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_) VULKAN_HPP_NOEXCEPT { shaderSMBuiltins = shaderSMBuiltins_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderSMBuiltins); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderSMBuiltinsFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceShaderSMBuiltinsFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderSMBuiltins == rhs.shaderSMBuiltins); # endif } bool operator!=(PhysicalDeviceShaderSMBuiltinsFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV) == sizeof(VkPhysicalDeviceShaderSMBuiltinsFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; }; struct PhysicalDeviceShaderSMBuiltinsPropertiesNV { using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderSMCount(shaderSMCount_), shaderWarpsPerSM(shaderWarpsPerSM_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(PhysicalDeviceShaderSMBuiltinsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSMBuiltinsPropertiesNV(VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderSMBuiltinsPropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderSMBuiltinsPropertiesNV &operator=(PhysicalDeviceShaderSMBuiltinsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSMBuiltinsPropertiesNV &operator=(VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderSMCount, shaderWarpsPerSM); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderSMBuiltinsPropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceShaderSMBuiltinsPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderSMCount == rhs.shaderSMCount) && (shaderWarpsPerSM == rhs.shaderWarpsPerSM); # endif } bool operator!=(PhysicalDeviceShaderSMBuiltinsPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; void *pNext = {}; uint32_t shaderSMCount = {}; uint32_t shaderWarpsPerSM = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV) == sizeof(VkPhysicalDeviceShaderSMBuiltinsPropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is 
not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; }; struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures { using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderSubgroupExtendedTypes(shaderSubgroupExtendedTypes_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(PhysicalDeviceShaderSubgroupExtendedTypesFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderSubgroupExtendedTypesFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=(PhysicalDeviceShaderSubgroupExtendedTypesFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSubgroupExtendedTypesFeatures &operator=(VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_) VULKAN_HPP_NOEXCEPT { shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderSubgroupExtendedTypes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderSubgroupExtendedTypesFeatures const &) const = default; #else bool operator==(PhysicalDeviceShaderSubgroupExtendedTypesFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes); # endif } bool operator!=(PhysicalDeviceShaderSubgroupExtendedTypesFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures) == 
sizeof(VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; }; using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderSubgroupUniformControlFlow(shaderSubgroupUniformControlFlow_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( *reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=(PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & operator=(VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_) VULKAN_HPP_NOEXCEPT { shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderSubgroupUniformControlFlow); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &) const = default; #else bool operator==(PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow); # endif } bool operator!=(PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR) == sizeof(VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; }; struct PhysicalDeviceShaderTerminateInvocationFeatures { using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shaderTerminateInvocation(shaderTerminateInvocation_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures(PhysicalDeviceShaderTerminateInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderTerminateInvocationFeatures(VkPhysicalDeviceShaderTerminateInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderTerminateInvocationFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShaderTerminateInvocationFeatures &operator=(PhysicalDeviceShaderTerminateInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderTerminateInvocationFeatures &operator=(VkPhysicalDeviceShaderTerminateInvocationFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setShaderTerminateInvocation(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_) VULKAN_HPP_NOEXCEPT { shaderTerminateInvocation = shaderTerminateInvocation_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shaderTerminateInvocation); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShaderTerminateInvocationFeatures const &) const = default; #else bool 
operator==(PhysicalDeviceShaderTerminateInvocationFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderTerminateInvocation == rhs.shaderTerminateInvocation); # endif } bool operator!=(PhysicalDeviceShaderTerminateInvocationFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures) == sizeof(VkPhysicalDeviceShaderTerminateInvocationFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShaderTerminateInvocationFeatures; }; using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; struct PhysicalDeviceShadingRateImageFeaturesNV { using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shadingRateImage(shadingRateImage_), shadingRateCoarseSampleOrder(shadingRateCoarseSampleOrder_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(PhysicalDeviceShadingRateImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShadingRateImageFeaturesNV(VkPhysicalDeviceShadingRateImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShadingRateImageFeaturesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShadingRateImageFeaturesNV &operator=(PhysicalDeviceShadingRateImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShadingRateImageFeaturesNV &operator=(VkPhysicalDeviceShadingRateImageFeaturesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &setShadingRateImage(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_) VULKAN_HPP_NOEXCEPT { shadingRateImage = shadingRateImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder(VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_) VULKAN_HPP_NOEXCEPT { shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator 
VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShadingRateImageFeaturesNV const &) const = default; #else bool operator==(PhysicalDeviceShadingRateImageFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shadingRateImage == rhs.shadingRateImage) && (shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder); # endif } bool operator!=(PhysicalDeviceShadingRateImageFeaturesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV) == sizeof(VkPhysicalDeviceShadingRateImageFeaturesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShadingRateImageFeaturesNV; }; struct PhysicalDeviceShadingRateImagePropertiesNV { using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shadingRateTexelSize(shadingRateTexelSize_), shadingRatePaletteSize(shadingRatePaletteSize_), shadingRateMaxCoarseSamples(shadingRateMaxCoarseSamples_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(PhysicalDeviceShadingRateImagePropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShadingRateImagePropertiesNV(VkPhysicalDeviceShadingRateImagePropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShadingRateImagePropertiesNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceShadingRateImagePropertiesNV &operator=(PhysicalDeviceShadingRateImagePropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShadingRateImagePropertiesNV &operator=(VkPhysicalDeviceShadingRateImagePropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceShadingRateImagePropertiesNV const &) const = default; #else bool operator==(PhysicalDeviceShadingRateImagePropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shadingRateTexelSize == rhs.shadingRateTexelSize) && (shadingRatePaletteSize == rhs.shadingRatePaletteSize) && (shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples); # endif } bool operator!=(PhysicalDeviceShadingRateImagePropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; void *pNext = {}; VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; uint32_t shadingRatePaletteSize = {}; uint32_t shadingRateMaxCoarseSamples = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV) == sizeof(VkPhysicalDeviceShadingRateImagePropertiesNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceShadingRateImagePropertiesNV; }; struct PhysicalDeviceSparseImageFormatInfo2 { using NativeType = VkPhysicalDeviceSparseImageFormatInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), format(format_), type(type_), samples(samples_), usage(usage_), tiling(tiling_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(PhysicalDeviceSparseImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSparseImageFormatInfo2(VkPhysicalDeviceSparseImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSparseImageFormatInfo2(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSparseImageFormatInfo2 &operator=(PhysicalDeviceSparseImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSparseImageFormatInfo2 &operator=(VkPhysicalDeviceSparseImageFormatInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &setFormat(VULKAN_HPP_NAMESPACE::Format 
format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &setType(VULKAN_HPP_NAMESPACE::ImageType type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &setSamples(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_) VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &setUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &setTiling(VULKAN_HPP_NAMESPACE::ImageTiling tiling_) VULKAN_HPP_NOEXCEPT { tiling = tiling_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, format, type, samples, usage, tiling); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSparseImageFormatInfo2 const &) const = default; #else bool operator==(PhysicalDeviceSparseImageFormatInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (format == rhs.format) && (type == rhs.type) && (samples == rhs.samples) && (usage == rhs.usage) && (tiling == rhs.tiling); # endif } bool operator!=(PhysicalDeviceSparseImageFormatInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2) == sizeof(VkPhysicalDeviceSparseImageFormatInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSparseImageFormatInfo2; }; using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; struct PhysicalDeviceSubgroupProperties { using NativeType = VkPhysicalDeviceSubgroupProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), subgroupSize(subgroupSize_), supportedStages(supportedStages_), supportedOperations(supportedOperations_), quadOperationsInAllStages(quadOperationsInAllStages_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(PhysicalDeviceSubgroupProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupProperties(VkPhysicalDeviceSubgroupProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSubgroupProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSubgroupProperties &operator=(PhysicalDeviceSubgroupProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupProperties &operator=(VkPhysicalDeviceSubgroupProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSubgroupProperties const &) const = default; #else bool operator==(PhysicalDeviceSubgroupProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (subgroupSize == rhs.subgroupSize) && (supportedStages == rhs.supportedStages) && (supportedOperations == rhs.supportedOperations) && (quadOperationsInAllStages == rhs.quadOperationsInAllStages); # endif } bool operator!=(PhysicalDeviceSubgroupProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; void *pNext = {}; uint32_t subgroupSize = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties) == sizeof(VkPhysicalDeviceSubgroupProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSubgroupProperties; }; struct PhysicalDeviceSubgroupSizeControlFeatures { using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), subgroupSizeControl(subgroupSizeControl_), 
computeFullSubgroups(computeFullSubgroups_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures(PhysicalDeviceSubgroupSizeControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupSizeControlFeatures(VkPhysicalDeviceSubgroupSizeControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSubgroupSizeControlFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSubgroupSizeControlFeatures &operator=(PhysicalDeviceSubgroupSizeControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupSizeControlFeatures &operator=(VkPhysicalDeviceSubgroupSizeControlFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setSubgroupSizeControl(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_) VULKAN_HPP_NOEXCEPT { subgroupSizeControl = subgroupSizeControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setComputeFullSubgroups(VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_) VULKAN_HPP_NOEXCEPT { computeFullSubgroups = computeFullSubgroups_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, subgroupSizeControl, computeFullSubgroups); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSubgroupSizeControlFeatures const &) const = default; #else bool operator==(PhysicalDeviceSubgroupSizeControlFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (subgroupSizeControl == rhs.subgroupSizeControl) && (computeFullSubgroups == rhs.computeFullSubgroups); # endif } bool operator!=(PhysicalDeviceSubgroupSizeControlFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures) == sizeof(VkPhysicalDeviceSubgroupSizeControlFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSubgroupSizeControlFeatures; }; using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; struct PhysicalDeviceSubgroupSizeControlProperties { using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties; static const bool allowDuplicate 
= false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minSubgroupSize(minSubgroupSize_), maxSubgroupSize(maxSubgroupSize_), maxComputeWorkgroupSubgroups(maxComputeWorkgroupSubgroups_), requiredSubgroupSizeStages(requiredSubgroupSizeStages_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties(PhysicalDeviceSubgroupSizeControlProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupSizeControlProperties(VkPhysicalDeviceSubgroupSizeControlProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSubgroupSizeControlProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSubgroupSizeControlProperties &operator=(PhysicalDeviceSubgroupSizeControlProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubgroupSizeControlProperties &operator=(VkPhysicalDeviceSubgroupSizeControlProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSubgroupSizeControlProperties const &) const = default; #else bool operator==(PhysicalDeviceSubgroupSizeControlProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minSubgroupSize == rhs.minSubgroupSize) && (maxSubgroupSize == rhs.maxSubgroupSize) && (maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups) && (requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages); # endif } bool operator!=(PhysicalDeviceSubgroupSizeControlProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; void *pNext = {}; uint32_t minSubgroupSize = {}; uint32_t maxSubgroupSize = {}; uint32_t maxComputeWorkgroupSubgroups = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties) == sizeof(VkPhysicalDeviceSubgroupSizeControlProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSubgroupSizeControlProperties; }; using 
PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; struct PhysicalDeviceSubpassShadingFeaturesHUAWEI { using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI(VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), subpassShading(subpassShading_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI(PhysicalDeviceSubpassShadingFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubpassShadingFeaturesHUAWEI(VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSubpassShadingFeaturesHUAWEI(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSubpassShadingFeaturesHUAWEI &operator=(PhysicalDeviceSubpassShadingFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubpassShadingFeaturesHUAWEI &operator=(VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI &setSubpassShading(VULKAN_HPP_NAMESPACE::Bool32 subpassShading_) VULKAN_HPP_NOEXCEPT { subpassShading = subpassShading_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, subpassShading); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSubpassShadingFeaturesHUAWEI const &) const = default; #else bool operator==(PhysicalDeviceSubpassShadingFeaturesHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (subpassShading == rhs.subpassShading); # endif } bool operator!=(PhysicalDeviceSubpassShadingFeaturesHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI) == sizeof(VkPhysicalDeviceSubpassShadingFeaturesHUAWEI), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI; }; struct 
PhysicalDeviceSubpassShadingPropertiesHUAWEI { using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxSubpassShadingWorkgroupSizeAspectRatio(maxSubpassShadingWorkgroupSizeAspectRatio_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI(PhysicalDeviceSubpassShadingPropertiesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubpassShadingPropertiesHUAWEI(VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSubpassShadingPropertiesHUAWEI(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSubpassShadingPropertiesHUAWEI &operator=(PhysicalDeviceSubpassShadingPropertiesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSubpassShadingPropertiesHUAWEI &operator=(VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSubpassShadingPropertiesHUAWEI const &) const = default; #else bool operator==(PhysicalDeviceSubpassShadingPropertiesHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio); # endif } bool operator!=(PhysicalDeviceSubpassShadingPropertiesHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; void *pNext = {}; uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI) == sizeof(VkPhysicalDeviceSubpassShadingPropertiesHUAWEI), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI; }; struct PhysicalDeviceSurfaceInfo2KHR { using NativeType = VkPhysicalDeviceSurfaceInfo2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), surface(surface_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(PhysicalDeviceSurfaceInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSurfaceInfo2KHR(VkPhysicalDeviceSurfaceInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSurfaceInfo2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSurfaceInfo2KHR &operator=(PhysicalDeviceSurfaceInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSurfaceInfo2KHR &operator=(VkPhysicalDeviceSurfaceInfo2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR &setSurface(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_) VULKAN_HPP_NOEXCEPT { surface = surface_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, surface); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSurfaceInfo2KHR const &) const = default; #else bool operator==(PhysicalDeviceSurfaceInfo2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (surface == rhs.surface); # endif } bool operator!=(PhysicalDeviceSurfaceInfo2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR) == sizeof(VkPhysicalDeviceSurfaceInfo2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSurfaceInfo2KHR; }; struct PhysicalDeviceSynchronization2Features { using NativeType = VkPhysicalDeviceSynchronization2Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), synchronization2(synchronization2_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features(PhysicalDeviceSynchronization2Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSynchronization2Features(VkPhysicalDeviceSynchronization2Features const &rhs) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceSynchronization2Features(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceSynchronization2Features &operator=(PhysicalDeviceSynchronization2Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSynchronization2Features &operator=(VkPhysicalDeviceSynchronization2Features const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features &setSynchronization2(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_) VULKAN_HPP_NOEXCEPT { synchronization2 = synchronization2_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, synchronization2); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceSynchronization2Features const &) const = default; #else bool operator==(PhysicalDeviceSynchronization2Features const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (synchronization2 == rhs.synchronization2); # endif } bool operator!=(PhysicalDeviceSynchronization2Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features) == sizeof(VkPhysicalDeviceSynchronization2Features), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceSynchronization2Features; }; using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT { using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), texelBufferAlignment(texelBufferAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(PhysicalDeviceTexelBufferAlignmentFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceTexelBufferAlignmentFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTexelBufferAlignmentFeaturesEXT &operator=(PhysicalDeviceTexelBufferAlignmentFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTexelBufferAlignmentFeaturesEXT &operator=(VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_) VULKAN_HPP_NOEXCEPT { texelBufferAlignment = texelBufferAlignment_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, texelBufferAlignment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceTexelBufferAlignmentFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceTexelBufferAlignmentFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (texelBufferAlignment == rhs.texelBufferAlignment); # endif } bool operator!=(PhysicalDeviceTexelBufferAlignmentFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT) == sizeof(VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; }; struct PhysicalDeviceTexelBufferAlignmentProperties { using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
storageTexelBufferOffsetAlignmentBytes(storageTexelBufferOffsetAlignmentBytes_), storageTexelBufferOffsetSingleTexelAlignment(storageTexelBufferOffsetSingleTexelAlignment_), uniformTexelBufferOffsetAlignmentBytes(uniformTexelBufferOffsetAlignmentBytes_), uniformTexelBufferOffsetSingleTexelAlignment(uniformTexelBufferOffsetSingleTexelAlignment_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties(PhysicalDeviceTexelBufferAlignmentProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTexelBufferAlignmentProperties(VkPhysicalDeviceTexelBufferAlignmentProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceTexelBufferAlignmentProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTexelBufferAlignmentProperties &operator=(PhysicalDeviceTexelBufferAlignmentProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTexelBufferAlignmentProperties &operator=(VkPhysicalDeviceTexelBufferAlignmentProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceTexelBufferAlignmentProperties const &) const = default; #else bool operator==(PhysicalDeviceTexelBufferAlignmentProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes) && (storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment) && (uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes) && (uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment); # endif } bool operator!=(PhysicalDeviceTexelBufferAlignmentProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties) == sizeof(VkPhysicalDeviceTexelBufferAlignmentProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = 
PhysicalDeviceTexelBufferAlignmentProperties; }; using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; struct PhysicalDeviceTextureCompressionASTCHDRFeatures { using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), textureCompressionASTC_HDR(textureCompressionASTC_HDR_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures(PhysicalDeviceTextureCompressionASTCHDRFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTextureCompressionASTCHDRFeatures(VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceTextureCompressionASTCHDRFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTextureCompressionASTCHDRFeatures &operator=(PhysicalDeviceTextureCompressionASTCHDRFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTextureCompressionASTCHDRFeatures &operator=(VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setTextureCompressionASTC_HDR(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_) VULKAN_HPP_NOEXCEPT { textureCompressionASTC_HDR = textureCompressionASTC_HDR_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, textureCompressionASTC_HDR); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceTextureCompressionASTCHDRFeatures const &) const = default; #else bool operator==(PhysicalDeviceTextureCompressionASTCHDRFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR); # endif } bool operator!=(PhysicalDeviceTextureCompressionASTCHDRFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures) == sizeof(VkPhysicalDeviceTextureCompressionASTCHDRFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct 
wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures; }; using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; struct PhysicalDeviceTimelineSemaphoreFeatures { using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), timelineSemaphore(timelineSemaphore_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(PhysicalDeviceTimelineSemaphoreFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTimelineSemaphoreFeatures(VkPhysicalDeviceTimelineSemaphoreFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceTimelineSemaphoreFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTimelineSemaphoreFeatures &operator=(PhysicalDeviceTimelineSemaphoreFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTimelineSemaphoreFeatures &operator=(VkPhysicalDeviceTimelineSemaphoreFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_) VULKAN_HPP_NOEXCEPT { timelineSemaphore = timelineSemaphore_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, timelineSemaphore); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceTimelineSemaphoreFeatures const &) const = default; #else bool operator==(PhysicalDeviceTimelineSemaphoreFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (timelineSemaphore == rhs.timelineSemaphore); # endif } bool operator!=(PhysicalDeviceTimelineSemaphoreFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures) == sizeof(VkPhysicalDeviceTimelineSemaphoreFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
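  // Editorial usage sketch (illustration only, not generated from the registry): a typical way to check
  // whether timeline semaphores are supported is to pull PhysicalDeviceTimelineSemaphoreFeatures through
  // the StructureChain overload of getFeatures2. "physicalDevice" is an assumed, already-acquired
  // vk::PhysicalDevice, and the default "vk" namespace alias is assumed.
  //
  //   auto featureChain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                                   vk::PhysicalDeviceTimelineSemaphoreFeatures>();
  //   bool hasTimeline  = featureChain.get<vk::PhysicalDeviceTimelineSemaphoreFeatures>().timelineSemaphore;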
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceTimelineSemaphoreFeatures; }; using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; struct PhysicalDeviceTimelineSemaphoreProperties { using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxTimelineSemaphoreValueDifference(maxTimelineSemaphoreValueDifference_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(PhysicalDeviceTimelineSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTimelineSemaphoreProperties(VkPhysicalDeviceTimelineSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceTimelineSemaphoreProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTimelineSemaphoreProperties &operator=(PhysicalDeviceTimelineSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTimelineSemaphoreProperties &operator=(VkPhysicalDeviceTimelineSemaphoreProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxTimelineSemaphoreValueDifference); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceTimelineSemaphoreProperties const &) const = default; #else bool operator==(PhysicalDeviceTimelineSemaphoreProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference); # endif } bool operator!=(PhysicalDeviceTimelineSemaphoreProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; void *pNext = {}; uint64_t maxTimelineSemaphoreValueDifference = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties) == sizeof(VkPhysicalDeviceTimelineSemaphoreProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceTimelineSemaphoreProperties; }; using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; struct PhysicalDeviceToolProperties { using NativeType = 
VkPhysicalDeviceToolProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties(std::array const &name_ = {}, std::array const &version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, std::array const &description_ = {}, std::array const &layer_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), name(name_), version(version_), purposes(purposes_), description(description_), layer(layer_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties(PhysicalDeviceToolProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceToolProperties(VkPhysicalDeviceToolProperties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceToolProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceToolProperties &operator=(PhysicalDeviceToolProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceToolProperties &operator=(VkPhysicalDeviceToolProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, name, version, purposes, description, layer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceToolProperties const &) const = default; #else bool operator==(PhysicalDeviceToolProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (name == rhs.name) && (version == rhs.version) && (purposes == rhs.purposes) && (description == rhs.description) && (layer == rhs.layer); # endif } bool operator!=(PhysicalDeviceToolProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties) == sizeof(VkPhysicalDeviceToolProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceToolProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceToolProperties; }; using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; struct PhysicalDeviceTransformFeedbackFeaturesEXT { using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT; static const bool 
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), transformFeedback(transformFeedback_), geometryStreams(geometryStreams_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(PhysicalDeviceTransformFeedbackFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTransformFeedbackFeaturesEXT(VkPhysicalDeviceTransformFeedbackFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceTransformFeedbackFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTransformFeedbackFeaturesEXT &operator=(PhysicalDeviceTransformFeedbackFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTransformFeedbackFeaturesEXT &operator=(VkPhysicalDeviceTransformFeedbackFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_) VULKAN_HPP_NOEXCEPT { transformFeedback = transformFeedback_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT &setGeometryStreams(VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_) VULKAN_HPP_NOEXCEPT { geometryStreams = geometryStreams_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, transformFeedback, geometryStreams); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceTransformFeedbackFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceTransformFeedbackFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (transformFeedback == rhs.transformFeedback) && (geometryStreams == rhs.geometryStreams); # endif } bool operator!=(PhysicalDeviceTransformFeedbackFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT) == sizeof(VkPhysicalDeviceTransformFeedbackFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
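  // Editorial usage sketch (illustration only): transform feedback is enabled at device creation by
  // chaining PhysicalDeviceTransformFeedbackFeaturesEXT into the DeviceCreateInfo::pNext chain after
  // confirming support. "deviceCreateInfo" is an assumed, otherwise fully populated vk::DeviceCreateInfo,
  // with VK_EXT_transform_feedback assumed to be in its enabled extension list.
  //
  //   vk::PhysicalDeviceTransformFeedbackFeaturesEXT xfbFeatures;
  //   xfbFeatures.setTransformFeedback( VK_TRUE ).setGeometryStreams( VK_TRUE );
  //   deviceCreateInfo.setPNext( &xfbFeatures );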
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; }; struct PhysicalDeviceTransformFeedbackPropertiesEXT { using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxTransformFeedbackStreams(maxTransformFeedbackStreams_), maxTransformFeedbackBuffers(maxTransformFeedbackBuffers_), maxTransformFeedbackBufferSize(maxTransformFeedbackBufferSize_), maxTransformFeedbackStreamDataSize(maxTransformFeedbackStreamDataSize_), maxTransformFeedbackBufferDataSize(maxTransformFeedbackBufferDataSize_), maxTransformFeedbackBufferDataStride(maxTransformFeedbackBufferDataStride_), transformFeedbackQueries(transformFeedbackQueries_), transformFeedbackStreamsLinesTriangles(transformFeedbackStreamsLinesTriangles_), transformFeedbackRasterizationStreamSelect(transformFeedbackRasterizationStreamSelect_), transformFeedbackDraw(transformFeedbackDraw_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(PhysicalDeviceTransformFeedbackPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTransformFeedbackPropertiesEXT(VkPhysicalDeviceTransformFeedbackPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceTransformFeedbackPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceTransformFeedbackPropertiesEXT &operator=(PhysicalDeviceTransformFeedbackPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceTransformFeedbackPropertiesEXT &operator=(VkPhysicalDeviceTransformFeedbackPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxTransformFeedbackStreams, maxTransformFeedbackBuffers, maxTransformFeedbackBufferSize, maxTransformFeedbackStreamDataSize, maxTransformFeedbackBufferDataSize, maxTransformFeedbackBufferDataStride, transformFeedbackQueries, transformFeedbackStreamsLinesTriangles, transformFeedbackRasterizationStreamSelect, transformFeedbackDraw); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto 
operator<=>(PhysicalDeviceTransformFeedbackPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceTransformFeedbackPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams) && (maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers) && (maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize) && (maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize) && (maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize) && (maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride) && (transformFeedbackQueries == rhs.transformFeedbackQueries) && (transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles) && (transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect) && (transformFeedbackDraw == rhs.transformFeedbackDraw); # endif } bool operator!=(PhysicalDeviceTransformFeedbackPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; void *pNext = {}; uint32_t maxTransformFeedbackStreams = {}; uint32_t maxTransformFeedbackBuffers = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; uint32_t maxTransformFeedbackStreamDataSize = {}; uint32_t maxTransformFeedbackBufferDataSize = {}; uint32_t maxTransformFeedbackBufferDataStride = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT) == sizeof(VkPhysicalDeviceTransformFeedbackPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; }; struct PhysicalDeviceUniformBufferStandardLayoutFeatures { using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), uniformBufferStandardLayout(uniformBufferStandardLayout_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(PhysicalDeviceUniformBufferStandardLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceUniformBufferStandardLayoutFeatures(VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceUniformBufferStandardLayoutFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ 
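    // Editorial usage sketch (illustration only): like the other feature structs, this one can be chained
    // into vk::PhysicalDeviceFeatures2 to query support, or into vk::DeviceCreateInfo::pNext to enable it.
    // "physicalDevice" is an assumed, already-acquired vk::PhysicalDevice.
    //
    //   vk::PhysicalDeviceUniformBufferStandardLayoutFeatures stdLayoutFeatures;
    //   vk::PhysicalDeviceFeatures2                            features2;
    //   features2.pNext = &stdLayoutFeatures;
    //   physicalDevice.getFeatures2( &features2 );
    //   // stdLayoutFeatures.uniformBufferStandardLayout now reports whether the feature is available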
PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=(PhysicalDeviceUniformBufferStandardLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceUniformBufferStandardLayoutFeatures &operator=(VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_) VULKAN_HPP_NOEXCEPT { uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, uniformBufferStandardLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceUniformBufferStandardLayoutFeatures const &) const = default; #else bool operator==(PhysicalDeviceUniformBufferStandardLayoutFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (uniformBufferStandardLayout == rhs.uniformBufferStandardLayout); # endif } bool operator!=(PhysicalDeviceUniformBufferStandardLayoutFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures) == sizeof(VkPhysicalDeviceUniformBufferStandardLayoutFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; }; using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; struct PhysicalDeviceVariablePointersFeatures { using NativeType = VkPhysicalDeviceVariablePointersFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), variablePointersStorageBuffer(variablePointersStorageBuffer_), variablePointers(variablePointers_) { } VULKAN_HPP_CONSTEXPR 
PhysicalDeviceVariablePointersFeatures(PhysicalDeviceVariablePointersFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVariablePointersFeatures(VkPhysicalDeviceVariablePointersFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVariablePointersFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVariablePointersFeatures &operator=(PhysicalDeviceVariablePointersFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVariablePointersFeatures &operator=(VkPhysicalDeviceVariablePointersFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_) VULKAN_HPP_NOEXCEPT { variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &setVariablePointers(VULKAN_HPP_NAMESPACE::Bool32 variablePointers_) VULKAN_HPP_NOEXCEPT { variablePointers = variablePointers_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, variablePointersStorageBuffer, variablePointers); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVariablePointersFeatures const &) const = default; #else bool operator==(PhysicalDeviceVariablePointersFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (variablePointersStorageBuffer == rhs.variablePointersStorageBuffer) && (variablePointers == rhs.variablePointers); # endif } bool operator!=(PhysicalDeviceVariablePointersFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures) == sizeof(VkPhysicalDeviceVariablePointersFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVariablePointersFeatures; }; using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT { using 
NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vertexAttributeInstanceRateDivisor(vertexAttributeInstanceRateDivisor_), vertexAttributeInstanceRateZeroDivisor(vertexAttributeInstanceRateZeroDivisor_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(PhysicalDeviceVertexAttributeDivisorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVertexAttributeDivisorFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVertexAttributeDivisorFeaturesEXT &operator=(PhysicalDeviceVertexAttributeDivisorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexAttributeDivisorFeaturesEXT &operator=(VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVertexAttributeDivisorFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceVertexAttributeDivisorFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor) && (vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor); # endif } bool operator!=(PhysicalDeviceVertexAttributeDivisorFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: 
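    // Editorial usage sketch (illustration only): to use instance-rate vertex attribute divisors, enable
    // both features through the DeviceCreateInfo::pNext chain. "deviceCreateInfo" is an assumed, otherwise
    // populated vk::DeviceCreateInfo with VK_EXT_vertex_attribute_divisor among its enabled extensions.
    //
    //   vk::PhysicalDeviceVertexAttributeDivisorFeaturesEXT divisorFeatures( VK_TRUE, VK_TRUE );
    //   deviceCreateInfo.setPNext( &divisorFeatures );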
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT) == sizeof(VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT; }; struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT { using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxVertexAttribDivisor(maxVertexAttribDivisor_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexAttributeDivisorPropertiesEXT(VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVertexAttributeDivisorPropertiesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=(PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexAttributeDivisorPropertiesEXT &operator=(VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxVertexAttribDivisor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &) const = default; #else bool operator==(PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxVertexAttribDivisor == rhs.maxVertexAttribDivisor); # endif } bool operator!=(PhysicalDeviceVertexAttributeDivisorPropertiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; void *pNext = {}; uint32_t maxVertexAttribDivisor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT) == 
sizeof(VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; }; struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT { using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vertexInputDynamicState(vertexInputDynamicState_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(PhysicalDeviceVertexInputDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexInputDynamicStateFeaturesEXT(VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVertexInputDynamicStateFeaturesEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVertexInputDynamicStateFeaturesEXT &operator=(PhysicalDeviceVertexInputDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVertexInputDynamicStateFeaturesEXT &operator=(VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setVertexInputDynamicState(VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_) VULKAN_HPP_NOEXCEPT { vertexInputDynamicState = vertexInputDynamicState_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vertexInputDynamicState); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVertexInputDynamicStateFeaturesEXT const &) const = default; #else bool operator==(PhysicalDeviceVertexInputDynamicStateFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vertexInputDynamicState == rhs.vertexInputDynamicState); # endif } bool operator!=(PhysicalDeviceVertexInputDynamicStateFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 
vertexInputDynamicState = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT) == sizeof(VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT; }; #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoProfileKHR { using NativeType = VkVideoProfileKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoProfileKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid, VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), videoCodecOperation(videoCodecOperation_), chromaSubsampling(chromaSubsampling_), lumaBitDepth(lumaBitDepth_), chromaBitDepth(chromaBitDepth_) { } VULKAN_HPP_CONSTEXPR VideoProfileKHR(VideoProfileKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoProfileKHR(VkVideoProfileKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoProfileKHR(*reinterpret_cast(&rhs)) {} # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoProfileKHR &operator=(VideoProfileKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoProfileKHR &operator=(VkVideoProfileKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setVideoCodecOperation(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_) VULKAN_HPP_NOEXCEPT { videoCodecOperation = videoCodecOperation_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & setChromaSubsampling(VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_) VULKAN_HPP_NOEXCEPT { chromaSubsampling = chromaSubsampling_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR &setLumaBitDepth(VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_) VULKAN_HPP_NOEXCEPT { lumaBitDepth = lumaBitDepth_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR &setChromaBitDepth(VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_) VULKAN_HPP_NOEXCEPT { chromaBitDepth = chromaBitDepth_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoProfileKHR const &) const = 
default; # else bool operator==(VideoProfileKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (videoCodecOperation == rhs.videoCodecOperation) && (chromaSubsampling == rhs.chromaSubsampling) && (lumaBitDepth == rhs.lumaBitDepth) && (chromaBitDepth == rhs.chromaBitDepth); # endif } bool operator!=(VideoProfileKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid; VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoProfileKHR) == sizeof(VkVideoProfileKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoProfileKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoProfileKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoProfilesKHR { using NativeType = VkVideoProfilesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoProfilesKHR(uint32_t profileCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileKHR *pProfiles_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), profileCount(profileCount_), pProfiles(pProfiles_) { } VULKAN_HPP_CONSTEXPR VideoProfilesKHR(VideoProfilesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoProfilesKHR(VkVideoProfilesKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoProfilesKHR(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoProfilesKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &profiles_, void *pNext_ = nullptr) : pNext(pNext_) , profileCount(static_cast(profiles_.size())) , pProfiles(profiles_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoProfilesKHR &operator=(VideoProfilesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoProfilesKHR &operator=(VkVideoProfilesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR &setProfileCount(uint32_t profileCount_) VULKAN_HPP_NOEXCEPT { profileCount = profileCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR &setPProfiles(const VULKAN_HPP_NAMESPACE::VideoProfileKHR *pProfiles_) VULKAN_HPP_NOEXCEPT { pProfiles = pProfiles_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoProfilesKHR & setProfiles(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &profiles_) VULKAN_HPP_NOEXCEPT { profileCount = static_cast(profiles_.size()); pProfiles = profiles_.data(); return *this; } # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, profileCount, pProfiles); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoProfilesKHR const &) const = default; # else bool operator==(VideoProfilesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (profileCount == rhs.profileCount) && (pProfiles == rhs.pProfiles); # endif } bool operator!=(VideoProfilesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfilesKHR; void *pNext = {}; uint32_t profileCount = {}; const VULKAN_HPP_NAMESPACE::VideoProfileKHR *pProfiles = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoProfilesKHR) == sizeof(VkVideoProfilesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoProfilesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoProfilesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct PhysicalDeviceVideoFormatInfoKHR { using NativeType = VkPhysicalDeviceVideoFormatInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfilesKHR *pVideoProfiles_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageUsage(imageUsage_), pVideoProfiles(pVideoProfiles_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(PhysicalDeviceVideoFormatInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVideoFormatInfoKHR(VkPhysicalDeviceVideoFormatInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVideoFormatInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVideoFormatInfoKHR &operator=(PhysicalDeviceVideoFormatInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVideoFormatInfoKHR &operator=(VkPhysicalDeviceVideoFormatInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageUsage, pVideoProfiles); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVideoFormatInfoKHR const &) const = default; # else bool 
operator==(PhysicalDeviceVideoFormatInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageUsage == rhs.imageUsage) && (pVideoProfiles == rhs.pVideoProfiles); # endif } bool operator!=(PhysicalDeviceVideoFormatInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; const VULKAN_HPP_NAMESPACE::VideoProfilesKHR *pVideoProfiles = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR) == sizeof(VkPhysicalDeviceVideoFormatInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVideoFormatInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceVulkan11Features { using NativeType = VkPhysicalDeviceVulkan11Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), storageBuffer16BitAccess(storageBuffer16BitAccess_), uniformAndStorageBuffer16BitAccess(uniformAndStorageBuffer16BitAccess_), storagePushConstant16(storagePushConstant16_), storageInputOutput16(storageInputOutput16_), multiview(multiview_), multiviewGeometryShader(multiviewGeometryShader_), multiviewTessellationShader(multiviewTessellationShader_), variablePointersStorageBuffer(variablePointersStorageBuffer_), variablePointers(variablePointers_), protectedMemory(protectedMemory_), samplerYcbcrConversion(samplerYcbcrConversion_), shaderDrawParameters(shaderDrawParameters_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(PhysicalDeviceVulkan11Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Features(VkPhysicalDeviceVulkan11Features const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkan11Features(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVulkan11Features &operator=(PhysicalDeviceVulkan11Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Features &operator=(VkPhysicalDeviceVulkan11Features const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if 
!defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_) VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess(VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setStoragePushConstant16(VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_) VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setStorageInputOutput16(VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_) VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setMultiview(VULKAN_HPP_NAMESPACE::Bool32 multiview_) VULKAN_HPP_NOEXCEPT { multiview = multiview_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewGeometryShader(VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_) VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiviewTessellationShader(VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_) VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_) VULKAN_HPP_NOEXCEPT { variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setVariablePointers(VULKAN_HPP_NAMESPACE::Bool32 variablePointers_) VULKAN_HPP_NOEXCEPT { variablePointers = variablePointers_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setProtectedMemory(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_) VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_) VULKAN_HPP_NOEXCEPT { samplerYcbcrConversion = samplerYcbcrConversion_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &setShaderDrawParameters(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_) VULKAN_HPP_NOEXCEPT { shaderDrawParameters = shaderDrawParameters_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16, multiview, multiviewGeometryShader, multiviewTessellationShader, variablePointersStorageBuffer, variablePointers, protectedMemory, samplerYcbcrConversion, 
shaderDrawParameters); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVulkan11Features const &) const = default; #else bool operator==(PhysicalDeviceVulkan11Features const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (storageBuffer16BitAccess == rhs.storageBuffer16BitAccess) && (uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess) && (storagePushConstant16 == rhs.storagePushConstant16) && (storageInputOutput16 == rhs.storageInputOutput16) && (multiview == rhs.multiview) && (multiviewGeometryShader == rhs.multiviewGeometryShader) && (multiviewTessellationShader == rhs.multiviewTessellationShader) && (variablePointersStorageBuffer == rhs.variablePointersStorageBuffer) && (variablePointers == rhs.variablePointers) && (protectedMemory == rhs.protectedMemory) && (samplerYcbcrConversion == rhs.samplerYcbcrConversion) && (shaderDrawParameters == rhs.shaderDrawParameters); # endif } bool operator!=(PhysicalDeviceVulkan11Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features) == sizeof(VkPhysicalDeviceVulkan11Features), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVulkan11Features; }; struct PhysicalDeviceVulkan11Properties { using NativeType = VkPhysicalDeviceVulkan11Properties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array const &deviceUUID_ = {}, std::array const &driverUUID_ = {}, std::array const &deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), deviceUUID(deviceUUID_), driverUUID(driverUUID_), deviceLUID(deviceLUID_), deviceNodeMask(deviceNodeMask_), deviceLUIDValid(deviceLUIDValid_), subgroupSize(subgroupSize_), subgroupSupportedStages(subgroupSupportedStages_), subgroupSupportedOperations(subgroupSupportedOperations_), subgroupQuadOperationsInAllStages(subgroupQuadOperationsInAllStages_), pointClippingBehavior(pointClippingBehavior_), maxMultiviewViewCount(maxMultiviewViewCount_), maxMultiviewInstanceIndex(maxMultiviewInstanceIndex_), protectedNoFault(protectedNoFault_), maxPerSetDescriptors(maxPerSetDescriptors_), maxMemoryAllocationSize(maxMemoryAllocationSize_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(PhysicalDeviceVulkan11Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Properties(VkPhysicalDeviceVulkan11Properties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkan11Properties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVulkan11Properties &operator=(PhysicalDeviceVulkan11Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan11Properties &operator=(VkPhysicalDeviceVulkan11Properties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid, subgroupSize, subgroupSupportedStages, subgroupSupportedOperations, subgroupQuadOperationsInAllStages, pointClippingBehavior, maxMultiviewViewCount, maxMultiviewInstanceIndex, protectedNoFault, maxPerSetDescriptors, maxMemoryAllocationSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVulkan11Properties const &) const = default; #else bool operator==(PhysicalDeviceVulkan11Properties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (deviceUUID == rhs.deviceUUID) && (driverUUID == rhs.driverUUID) && (deviceLUID == rhs.deviceLUID) && (deviceNodeMask == rhs.deviceNodeMask) && (deviceLUIDValid == rhs.deviceLUIDValid) && (subgroupSize == rhs.subgroupSize) && (subgroupSupportedStages == rhs.subgroupSupportedStages) && (subgroupSupportedOperations == rhs.subgroupSupportedOperations) && (subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages) && (pointClippingBehavior == rhs.pointClippingBehavior) && (maxMultiviewViewCount == 
rhs.maxMultiviewViewCount) && (maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex) && (protectedNoFault == rhs.protectedNoFault) && (maxPerSetDescriptors == rhs.maxPerSetDescriptors) && (maxMemoryAllocationSize == rhs.maxMemoryAllocationSize); # endif } bool operator!=(PhysicalDeviceVulkan11Properties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; uint32_t deviceNodeMask = {}; VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; uint32_t subgroupSize = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; uint32_t maxMultiviewViewCount = {}; uint32_t maxMultiviewInstanceIndex = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; uint32_t maxPerSetDescriptors = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties) == sizeof(VkPhysicalDeviceVulkan11Properties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVulkan11Properties; }; struct PhysicalDeviceVulkan12Features { using NativeType = VkPhysicalDeviceVulkan12Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), samplerMirrorClampToEdge(samplerMirrorClampToEdge_), drawIndirectCount(drawIndirectCount_), storageBuffer8BitAccess(storageBuffer8BitAccess_), uniformAndStorageBuffer8BitAccess(uniformAndStorageBuffer8BitAccess_), storagePushConstant8(storagePushConstant8_), shaderBufferInt64Atomics(shaderBufferInt64Atomics_), shaderSharedInt64Atomics(shaderSharedInt64Atomics_), shaderFloat16(shaderFloat16_), shaderInt8(shaderInt8_), descriptorIndexing(descriptorIndexing_), shaderInputAttachmentArrayDynamicIndexing(shaderInputAttachmentArrayDynamicIndexing_), shaderUniformTexelBufferArrayDynamicIndexing(shaderUniformTexelBufferArrayDynamicIndexing_), shaderStorageTexelBufferArrayDynamicIndexing(shaderStorageTexelBufferArrayDynamicIndexing_), shaderUniformBufferArrayNonUniformIndexing(shaderUniformBufferArrayNonUniformIndexing_), shaderSampledImageArrayNonUniformIndexing(shaderSampledImageArrayNonUniformIndexing_), shaderStorageBufferArrayNonUniformIndexing(shaderStorageBufferArrayNonUniformIndexing_), shaderStorageImageArrayNonUniformIndexing(shaderStorageImageArrayNonUniformIndexing_), shaderInputAttachmentArrayNonUniformIndexing(shaderInputAttachmentArrayNonUniformIndexing_), shaderUniformTexelBufferArrayNonUniformIndexing(shaderUniformTexelBufferArrayNonUniformIndexing_), shaderStorageTexelBufferArrayNonUniformIndexing(shaderStorageTexelBufferArrayNonUniformIndexing_), descriptorBindingUniformBufferUpdateAfterBind(descriptorBindingUniformBufferUpdateAfterBind_), descriptorBindingSampledImageUpdateAfterBind(descriptorBindingSampledImageUpdateAfterBind_), 
descriptorBindingStorageImageUpdateAfterBind(descriptorBindingStorageImageUpdateAfterBind_), descriptorBindingStorageBufferUpdateAfterBind(descriptorBindingStorageBufferUpdateAfterBind_), descriptorBindingUniformTexelBufferUpdateAfterBind(descriptorBindingUniformTexelBufferUpdateAfterBind_), descriptorBindingStorageTexelBufferUpdateAfterBind(descriptorBindingStorageTexelBufferUpdateAfterBind_), descriptorBindingUpdateUnusedWhilePending(descriptorBindingUpdateUnusedWhilePending_), descriptorBindingPartiallyBound(descriptorBindingPartiallyBound_), descriptorBindingVariableDescriptorCount(descriptorBindingVariableDescriptorCount_), runtimeDescriptorArray(runtimeDescriptorArray_), samplerFilterMinmax(samplerFilterMinmax_), scalarBlockLayout(scalarBlockLayout_), imagelessFramebuffer(imagelessFramebuffer_), uniformBufferStandardLayout(uniformBufferStandardLayout_), shaderSubgroupExtendedTypes(shaderSubgroupExtendedTypes_), separateDepthStencilLayouts(separateDepthStencilLayouts_), hostQueryReset(hostQueryReset_), timelineSemaphore(timelineSemaphore_), bufferDeviceAddress(bufferDeviceAddress_), bufferDeviceAddressCaptureReplay(bufferDeviceAddressCaptureReplay_), bufferDeviceAddressMultiDevice(bufferDeviceAddressMultiDevice_), vulkanMemoryModel(vulkanMemoryModel_), vulkanMemoryModelDeviceScope(vulkanMemoryModelDeviceScope_), vulkanMemoryModelAvailabilityVisibilityChains(vulkanMemoryModelAvailabilityVisibilityChains_), shaderOutputViewportIndex(shaderOutputViewportIndex_), shaderOutputLayer(shaderOutputLayer_), subgroupBroadcastDynamicId(subgroupBroadcastDynamicId_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(PhysicalDeviceVulkan12Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Features(VkPhysicalDeviceVulkan12Features const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkan12Features(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVulkan12Features &operator=(PhysicalDeviceVulkan12Features const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Features &operator=(VkPhysicalDeviceVulkan12Features const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_) VULKAN_HPP_NOEXCEPT { samplerMirrorClampToEdge = samplerMirrorClampToEdge_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setDrawIndirectCount(VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_) VULKAN_HPP_NOEXCEPT { drawIndirectCount = drawIndirectCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_) VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess(VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setStoragePushConstant8(VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_) VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_) VULKAN_HPP_NOEXCEPT { shaderBufferInt64Atomics = shaderBufferInt64Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics(VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_) VULKAN_HPP_NOEXCEPT { shaderSharedInt64Atomics = shaderSharedInt64Atomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setShaderFloat16(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_) VULKAN_HPP_NOEXCEPT { shaderFloat16 = shaderFloat16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setShaderInt8(VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_) VULKAN_HPP_NOEXCEPT { shaderInt8 = shaderInt8_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setDescriptorIndexing(VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_) VULKAN_HPP_NOEXCEPT { descriptorIndexing = descriptorIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & 
setShaderUniformTexelBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing(VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_) VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_) VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_) VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_) VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray(VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_) VULKAN_HPP_NOEXCEPT { 
runtimeDescriptorArray = runtimeDescriptorArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setSamplerFilterMinmax(VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_) VULKAN_HPP_NOEXCEPT { samplerFilterMinmax = samplerFilterMinmax_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setScalarBlockLayout(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_) VULKAN_HPP_NOEXCEPT { scalarBlockLayout = scalarBlockLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setImagelessFramebuffer(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_) VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_) VULKAN_HPP_NOEXCEPT { uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_) VULKAN_HPP_NOEXCEPT { shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_) VULKAN_HPP_NOEXCEPT { separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setHostQueryReset(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_) VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setTimelineSemaphore(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_) VULKAN_HPP_NOEXCEPT { timelineSemaphore = timelineSemaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setBufferDeviceAddress(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_) VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setVulkanMemoryModel(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_) VULKAN_HPP_NOEXCEPT { vulkanMemoryModel = vulkanMemoryModel_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex(VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_) VULKAN_HPP_NOEXCEPT { shaderOutputViewportIndex = shaderOutputViewportIndex_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &setShaderOutputLayer(VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_) VULKAN_HPP_NOEXCEPT { shaderOutputLayer = shaderOutputLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId(VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_) VULKAN_HPP_NOEXCEPT { subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, samplerMirrorClampToEdge, drawIndirectCount, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8, shaderBufferInt64Atomics, shaderSharedInt64Atomics, shaderFloat16, shaderInt8, descriptorIndexing, shaderInputAttachmentArrayDynamicIndexing, shaderUniformTexelBufferArrayDynamicIndexing, shaderStorageTexelBufferArrayDynamicIndexing, shaderUniformBufferArrayNonUniformIndexing, shaderSampledImageArrayNonUniformIndexing, shaderStorageBufferArrayNonUniformIndexing, shaderStorageImageArrayNonUniformIndexing, shaderInputAttachmentArrayNonUniformIndexing, shaderUniformTexelBufferArrayNonUniformIndexing, shaderStorageTexelBufferArrayNonUniformIndexing, descriptorBindingUniformBufferUpdateAfterBind, descriptorBindingSampledImageUpdateAfterBind, descriptorBindingStorageImageUpdateAfterBind, descriptorBindingStorageBufferUpdateAfterBind, descriptorBindingUniformTexelBufferUpdateAfterBind, descriptorBindingStorageTexelBufferUpdateAfterBind, descriptorBindingUpdateUnusedWhilePending, descriptorBindingPartiallyBound, descriptorBindingVariableDescriptorCount, runtimeDescriptorArray, samplerFilterMinmax, scalarBlockLayout, imagelessFramebuffer, uniformBufferStandardLayout, shaderSubgroupExtendedTypes, separateDepthStencilLayouts, hostQueryReset, timelineSemaphore, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains, shaderOutputViewportIndex, shaderOutputLayer, subgroupBroadcastDynamicId); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVulkan12Features const &) const = default; #else bool operator==(PhysicalDeviceVulkan12Features const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge) && (drawIndirectCount == rhs.drawIndirectCount) && (storageBuffer8BitAccess == rhs.storageBuffer8BitAccess) && (uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess) && (storagePushConstant8 == rhs.storagePushConstant8) && (shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics) && (shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics) && (shaderFloat16 == rhs.shaderFloat16) && (shaderInt8 == rhs.shaderInt8) && (descriptorIndexing == rhs.descriptorIndexing) && (shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing) && (shaderUniformTexelBufferArrayDynamicIndexing == 
rhs.shaderUniformTexelBufferArrayDynamicIndexing) && (shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing) && (shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing) && (shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing) && (shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing) && (shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing) && (shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing) && (shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing) && (shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing) && (descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind) && (descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind) && (descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind) && (descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind) && (descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind) && (descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind) && (descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending) && (descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound) && (descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount) && (runtimeDescriptorArray == rhs.runtimeDescriptorArray) && (samplerFilterMinmax == rhs.samplerFilterMinmax) && (scalarBlockLayout == rhs.scalarBlockLayout) && (imagelessFramebuffer == rhs.imagelessFramebuffer) && (uniformBufferStandardLayout == rhs.uniformBufferStandardLayout) && (shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes) && (separateDepthStencilLayouts == rhs.separateDepthStencilLayouts) && (hostQueryReset == rhs.hostQueryReset) && (timelineSemaphore == rhs.timelineSemaphore) && (bufferDeviceAddress == rhs.bufferDeviceAddress) && (bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay) && (bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice) && (vulkanMemoryModel == rhs.vulkanMemoryModel) && (vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope) && (vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains) && (shaderOutputViewportIndex == rhs.shaderOutputViewportIndex) && (shaderOutputLayer == rhs.shaderOutputLayer) && (subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId); # endif } bool operator!=(PhysicalDeviceVulkan12Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; 
VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features) == sizeof(VkPhysicalDeviceVulkan12Features), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVulkan12Features; }; struct PhysicalDeviceVulkan12Properties { using NativeType = VkPhysicalDeviceVulkan12Properties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceVulkan12Properties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array const &driverName_ = {}, std::array const &driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t 
maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), driverID(driverID_), driverName(driverName_), driverInfo(driverInfo_), conformanceVersion(conformanceVersion_), denormBehaviorIndependence(denormBehaviorIndependence_), roundingModeIndependence(roundingModeIndependence_), shaderSignedZeroInfNanPreserveFloat16(shaderSignedZeroInfNanPreserveFloat16_), shaderSignedZeroInfNanPreserveFloat32(shaderSignedZeroInfNanPreserveFloat32_), shaderSignedZeroInfNanPreserveFloat64(shaderSignedZeroInfNanPreserveFloat64_), shaderDenormPreserveFloat16(shaderDenormPreserveFloat16_), shaderDenormPreserveFloat32(shaderDenormPreserveFloat32_), shaderDenormPreserveFloat64(shaderDenormPreserveFloat64_), shaderDenormFlushToZeroFloat16(shaderDenormFlushToZeroFloat16_), shaderDenormFlushToZeroFloat32(shaderDenormFlushToZeroFloat32_), shaderDenormFlushToZeroFloat64(shaderDenormFlushToZeroFloat64_), shaderRoundingModeRTEFloat16(shaderRoundingModeRTEFloat16_), shaderRoundingModeRTEFloat32(shaderRoundingModeRTEFloat32_), shaderRoundingModeRTEFloat64(shaderRoundingModeRTEFloat64_), shaderRoundingModeRTZFloat16(shaderRoundingModeRTZFloat16_), shaderRoundingModeRTZFloat32(shaderRoundingModeRTZFloat32_), shaderRoundingModeRTZFloat64(shaderRoundingModeRTZFloat64_), maxUpdateAfterBindDescriptorsInAllPools(maxUpdateAfterBindDescriptorsInAllPools_), shaderUniformBufferArrayNonUniformIndexingNative(shaderUniformBufferArrayNonUniformIndexingNative_), shaderSampledImageArrayNonUniformIndexingNative(shaderSampledImageArrayNonUniformIndexingNative_), shaderStorageBufferArrayNonUniformIndexingNative(shaderStorageBufferArrayNonUniformIndexingNative_), shaderStorageImageArrayNonUniformIndexingNative(shaderStorageImageArrayNonUniformIndexingNative_), shaderInputAttachmentArrayNonUniformIndexingNative(shaderInputAttachmentArrayNonUniformIndexingNative_), robustBufferAccessUpdateAfterBind(robustBufferAccessUpdateAfterBind_), quadDivergentImplicitLod(quadDivergentImplicitLod_), maxPerStageDescriptorUpdateAfterBindSamplers(maxPerStageDescriptorUpdateAfterBindSamplers_), maxPerStageDescriptorUpdateAfterBindUniformBuffers(maxPerStageDescriptorUpdateAfterBindUniformBuffers_), maxPerStageDescriptorUpdateAfterBindStorageBuffers(maxPerStageDescriptorUpdateAfterBindStorageBuffers_), maxPerStageDescriptorUpdateAfterBindSampledImages(maxPerStageDescriptorUpdateAfterBindSampledImages_), maxPerStageDescriptorUpdateAfterBindStorageImages(maxPerStageDescriptorUpdateAfterBindStorageImages_), maxPerStageDescriptorUpdateAfterBindInputAttachments(maxPerStageDescriptorUpdateAfterBindInputAttachments_), maxPerStageUpdateAfterBindResources(maxPerStageUpdateAfterBindResources_), maxDescriptorSetUpdateAfterBindSamplers(maxDescriptorSetUpdateAfterBindSamplers_), maxDescriptorSetUpdateAfterBindUniformBuffers(maxDescriptorSetUpdateAfterBindUniformBuffers_), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic(maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_), maxDescriptorSetUpdateAfterBindStorageBuffers(maxDescriptorSetUpdateAfterBindStorageBuffers_), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic(maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_), maxDescriptorSetUpdateAfterBindSampledImages(maxDescriptorSetUpdateAfterBindSampledImages_), maxDescriptorSetUpdateAfterBindStorageImages(maxDescriptorSetUpdateAfterBindStorageImages_), 
maxDescriptorSetUpdateAfterBindInputAttachments(maxDescriptorSetUpdateAfterBindInputAttachments_), supportedDepthResolveModes(supportedDepthResolveModes_), supportedStencilResolveModes(supportedStencilResolveModes_), independentResolveNone(independentResolveNone_), independentResolve(independentResolve_), filterMinmaxSingleComponentFormats(filterMinmaxSingleComponentFormats_), filterMinmaxImageComponentMapping(filterMinmaxImageComponentMapping_), maxTimelineSemaphoreValueDifference(maxTimelineSemaphoreValueDifference_), framebufferIntegerColorSampleCounts(framebufferIntegerColorSampleCounts_) { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(PhysicalDeviceVulkan12Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Properties(VkPhysicalDeviceVulkan12Properties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkan12Properties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVulkan12Properties &operator=(PhysicalDeviceVulkan12Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan12Properties &operator=(VkPhysicalDeviceVulkan12Properties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::ConformanceVersion const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, driverID, driverName, driverInfo, conformanceVersion, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, 
shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping, maxTimelineSemaphoreValueDifference, framebufferIntegerColorSampleCounts); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVulkan12Properties const &) const = default; #else bool operator==(PhysicalDeviceVulkan12Properties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (driverID == rhs.driverID) && (driverName == rhs.driverName) && (driverInfo == rhs.driverInfo) && (conformanceVersion == rhs.conformanceVersion) && (denormBehaviorIndependence == rhs.denormBehaviorIndependence) && (roundingModeIndependence == rhs.roundingModeIndependence) && (shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16) && (shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32) && (shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64) && (shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16) && (shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32) && (shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64) && (shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16) && (shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32) && (shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64) && (shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16) && (shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32) && (shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64) && (shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16) && (shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32) && (shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64) && (maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools) && 
(shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative) && (shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative) && (shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative) && (shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative) && (shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative) && (robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind) && (quadDivergentImplicitLod == rhs.quadDivergentImplicitLod) && (maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers) && (maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers) && (maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers) && (maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages) && (maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages) && (maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments) && (maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources) && (maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers) && (maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers) && (maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic) && (maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers) && (maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic) && (maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages) && (maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages) && (maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments) && (supportedDepthResolveModes == rhs.supportedDepthResolveModes) && (supportedStencilResolveModes == rhs.supportedStencilResolveModes) && (independentResolveNone == rhs.independentResolveNone) && (independentResolve == rhs.independentResolve) && (filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats) && (filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping) && (maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference) && (framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts); # endif } bool operator!=(PhysicalDeviceVulkan12Properties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; void *pNext = {}; VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; 
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; uint32_t maxPerStageUpdateAfterBindResources = {}; uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; uint64_t maxTimelineSemaphoreValueDifference = {}; VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties) == sizeof(VkPhysicalDeviceVulkan12Properties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
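  // -------------------------------------------------------------------------
  // Illustrative usage sketch (not part of the generated header): querying the
  // Vulkan 1.2 properties above through a pNext chain. The physical-device
  // handle `physicalDevice` is assumed to exist in the calling code.
  //
  //   vk::PhysicalDeviceProperties2        properties2;
  //   vk::PhysicalDeviceVulkan12Properties vulkan12Properties;
  //   properties2.pNext = &vulkan12Properties;       // chain the 1.2 block
  //   physicalDevice.getProperties2( &properties2 ); // fills both structs
  //
  //   // or, equivalently, with a StructureChain:
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceVulkan12Properties>();
  //   vk::PhysicalDeviceVulkan12Properties const & props12 =
  //     chain.get<vk::PhysicalDeviceVulkan12Properties>();
  // -------------------------------------------------------------------------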
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value,
                         "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!");

template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
{
  using Type = PhysicalDeviceVulkan12Properties;
};

struct PhysicalDeviceVulkan13Features
{
  using NativeType = VkPhysicalDeviceVulkan13Features;

  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
  VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
    VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {},
    void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
    : pNext(pNext_), robustImageAccess(robustImageAccess_), inlineUniformBlock(inlineUniformBlock_),
      descriptorBindingInlineUniformBlockUpdateAfterBind(descriptorBindingInlineUniformBlockUpdateAfterBind_),
      pipelineCreationCacheControl(pipelineCreationCacheControl_), privateData(privateData_),
      shaderDemoteToHelperInvocation(shaderDemoteToHelperInvocation_), shaderTerminateInvocation(shaderTerminateInvocation_),
      subgroupSizeControl(subgroupSizeControl_), computeFullSubgroups(computeFullSubgroups_), synchronization2(synchronization2_),
      textureCompressionASTC_HDR(textureCompressionASTC_HDR_), shaderZeroInitializeWorkgroupMemory(shaderZeroInitializeWorkgroupMemory_),
      dynamicRendering(dynamicRendering_), shaderIntegerDotProduct(shaderIntegerDotProduct_), maintenance4(maintenance4_)
  {
  }

  VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features(PhysicalDeviceVulkan13Features const &rhs) VULKAN_HPP_NOEXCEPT = default;

  PhysicalDeviceVulkan13Features(VkPhysicalDeviceVulkan13Features const &rhs) VULKAN_HPP_NOEXCEPT
    : PhysicalDeviceVulkan13Features(*reinterpret_cast<PhysicalDeviceVulkan13Features const *>(&rhs))
  {
  }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

  PhysicalDeviceVulkan13Features &operator=(PhysicalDeviceVulkan13Features const &rhs) VULKAN_HPP_NOEXCEPT = default;

  PhysicalDeviceVulkan13Features &operator=(VkPhysicalDeviceVulkan13Features const &rhs) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const *>(&rhs);
    return *this;
  }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setRobustImageAccess(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_) VULKAN_HPP_NOEXCEPT { robustImageAccess = robustImageAccess_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setInlineUniformBlock(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_) VULKAN_HPP_NOEXCEPT { inlineUniformBlock = inlineUniformBlock_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setDescriptorBindingInlineUniformBlockUpdateAfterBind(VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_) VULKAN_HPP_NOEXCEPT { descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setPipelineCreationCacheControl(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_) VULKAN_HPP_NOEXCEPT { pipelineCreationCacheControl = pipelineCreationCacheControl_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setPrivateData(VULKAN_HPP_NAMESPACE::Bool32 privateData_) VULKAN_HPP_NOEXCEPT { privateData = privateData_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setShaderDemoteToHelperInvocation(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_) VULKAN_HPP_NOEXCEPT { shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setShaderTerminateInvocation(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_) VULKAN_HPP_NOEXCEPT { shaderTerminateInvocation = shaderTerminateInvocation_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setSubgroupSizeControl(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_) VULKAN_HPP_NOEXCEPT { subgroupSizeControl = subgroupSizeControl_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setComputeFullSubgroups(VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_) VULKAN_HPP_NOEXCEPT { computeFullSubgroups = computeFullSubgroups_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setSynchronization2(VULKAN_HPP_NAMESPACE::Bool32 synchronization2_) VULKAN_HPP_NOEXCEPT { synchronization2 = synchronization2_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setTextureCompressionASTC_HDR(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_) VULKAN_HPP_NOEXCEPT { textureCompressionASTC_HDR = textureCompressionASTC_HDR_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setShaderZeroInitializeWorkgroupMemory(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_) VULKAN_HPP_NOEXCEPT { shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setDynamicRendering(VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_) VULKAN_HPP_NOEXCEPT { dynamicRendering = dynamicRendering_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setShaderIntegerDotProduct(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_) VULKAN_HPP_NOEXCEPT { shaderIntegerDotProduct = shaderIntegerDotProduct_; return *this; }
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &setMaintenance4(VULKAN_HPP_NAMESPACE::Bool32 maintenance4_) VULKAN_HPP_NOEXCEPT { maintenance4 = maintenance4_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

  explicit operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>(this); }
  explicit operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceVulkan13Features *>(this); }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
  auto
# else
  std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
             VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
             VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
             VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
             VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
  reflect() const VULKAN_HPP_NOEXCEPT
  {
    return std::tie(sType, pNext, robustImageAccess, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind, pipelineCreationCacheControl, privateData,
                    shaderDemoteToHelperInvocation, shaderTerminateInvocation, subgroupSizeControl, computeFullSubgroups, synchronization2, textureCompressionASTC_HDR,
                    shaderZeroInitializeWorkgroupMemory, dynamicRendering, shaderIntegerDotProduct, maintenance4);
  }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>(PhysicalDeviceVulkan13Features const &) const = default;
#else
  bool operator==(PhysicalDeviceVulkan13Features const &rhs) const VULKAN_HPP_NOEXCEPT
  {
# if defined(VULKAN_HPP_USE_REFLECT)
    return this->reflect() == rhs.reflect();
# else
    return (sType == rhs.sType) && (pNext == rhs.pNext) && (robustImageAccess == rhs.robustImageAccess) && (inlineUniformBlock == rhs.inlineUniformBlock) &&
           (descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind) &&
           (pipelineCreationCacheControl == rhs.pipelineCreationCacheControl) && (privateData == rhs.privateData) &&
           (shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation) && (shaderTerminateInvocation == rhs.shaderTerminateInvocation) &&
           (subgroupSizeControl == rhs.subgroupSizeControl) && (computeFullSubgroups == rhs.computeFullSubgroups) && (synchronization2 == rhs.synchronization2) &&
           (textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR) && (shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory) &&
           (dynamicRendering == rhs.dynamicRendering) && (shaderIntegerDotProduct == rhs.shaderIntegerDotProduct) && (maintenance4 == rhs.maintenance4);
# endif
  }

  bool operator!=(PhysicalDeviceVulkan13Features const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif

public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
  void *pNext = {};
  VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
  VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
  VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
  VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
  VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
  VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
  VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
  VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
  VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
  VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
  VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
  VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
  VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
  VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
  VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
};
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features) == sizeof(VkPhysicalDeviceVulkan13Features),
                         "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "struct wrapper is not a standard layout!");
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
                         "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!");

template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
{
  using Type = PhysicalDeviceVulkan13Features;
};

struct PhysicalDeviceVulkan13Properties
{
  using NativeType = VkPhysicalDeviceVulkan13Properties;

  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
  VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ =
{}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxInlineUniformTotalSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, 
VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minSubgroupSize(minSubgroupSize_), maxSubgroupSize(maxSubgroupSize_), maxComputeWorkgroupSubgroups(maxComputeWorkgroupSubgroups_), requiredSubgroupSizeStages(requiredSubgroupSizeStages_), maxInlineUniformBlockSize(maxInlineUniformBlockSize_), maxPerStageDescriptorInlineUniformBlocks(maxPerStageDescriptorInlineUniformBlocks_), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_), maxDescriptorSetInlineUniformBlocks(maxDescriptorSetInlineUniformBlocks_), maxDescriptorSetUpdateAfterBindInlineUniformBlocks(maxDescriptorSetUpdateAfterBindInlineUniformBlocks_), maxInlineUniformTotalSize(maxInlineUniformTotalSize_), integerDotProduct8BitUnsignedAccelerated(integerDotProduct8BitUnsignedAccelerated_), integerDotProduct8BitSignedAccelerated(integerDotProduct8BitSignedAccelerated_), integerDotProduct8BitMixedSignednessAccelerated(integerDotProduct8BitMixedSignednessAccelerated_), integerDotProduct4x8BitPackedUnsignedAccelerated(integerDotProduct4x8BitPackedUnsignedAccelerated_), integerDotProduct4x8BitPackedSignedAccelerated(integerDotProduct4x8BitPackedSignedAccelerated_), integerDotProduct4x8BitPackedMixedSignednessAccelerated(integerDotProduct4x8BitPackedMixedSignednessAccelerated_), integerDotProduct16BitUnsignedAccelerated(integerDotProduct16BitUnsignedAccelerated_), integerDotProduct16BitSignedAccelerated(integerDotProduct16BitSignedAccelerated_), integerDotProduct16BitMixedSignednessAccelerated(integerDotProduct16BitMixedSignednessAccelerated_), integerDotProduct32BitUnsignedAccelerated(integerDotProduct32BitUnsignedAccelerated_), integerDotProduct32BitSignedAccelerated(integerDotProduct32BitSignedAccelerated_), integerDotProduct32BitMixedSignednessAccelerated(integerDotProduct32BitMixedSignednessAccelerated_), integerDotProduct64BitUnsignedAccelerated(integerDotProduct64BitUnsignedAccelerated_), integerDotProduct64BitSignedAccelerated(integerDotProduct64BitSignedAccelerated_), integerDotProduct64BitMixedSignednessAccelerated(integerDotProduct64BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating8BitUnsignedAccelerated(integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating8BitSignedAccelerated(integerDotProductAccumulatingSaturating8BitSignedAccelerated_), integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated(integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_), integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated(integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_), integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating16BitUnsignedAccelerated(integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating16BitSignedAccelerated(integerDotProductAccumulatingSaturating16BitSignedAccelerated_), integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_), 
integerDotProductAccumulatingSaturating32BitUnsignedAccelerated(integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating32BitSignedAccelerated(integerDotProductAccumulatingSaturating32BitSignedAccelerated_), integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_), integerDotProductAccumulatingSaturating64BitUnsignedAccelerated(integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_), integerDotProductAccumulatingSaturating64BitSignedAccelerated(integerDotProductAccumulatingSaturating64BitSignedAccelerated_), integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated(integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_), storageTexelBufferOffsetAlignmentBytes(storageTexelBufferOffsetAlignmentBytes_), storageTexelBufferOffsetSingleTexelAlignment(storageTexelBufferOffsetSingleTexelAlignment_), uniformTexelBufferOffsetAlignmentBytes(uniformTexelBufferOffsetAlignmentBytes_), uniformTexelBufferOffsetSingleTexelAlignment(uniformTexelBufferOffsetSingleTexelAlignment_), maxBufferSize(maxBufferSize_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties(PhysicalDeviceVulkan13Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan13Properties(VkPhysicalDeviceVulkan13Properties const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkan13Properties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVulkan13Properties &operator=(PhysicalDeviceVulkan13Properties const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkan13Properties &operator=(VkPhysicalDeviceVulkan13Properties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks, maxInlineUniformTotalSize, integerDotProduct8BitUnsignedAccelerated, integerDotProduct8BitSignedAccelerated, integerDotProduct8BitMixedSignednessAccelerated, integerDotProduct4x8BitPackedUnsignedAccelerated, integerDotProduct4x8BitPackedSignedAccelerated, integerDotProduct4x8BitPackedMixedSignednessAccelerated, integerDotProduct16BitUnsignedAccelerated, integerDotProduct16BitSignedAccelerated, integerDotProduct16BitMixedSignednessAccelerated, integerDotProduct32BitUnsignedAccelerated, integerDotProduct32BitSignedAccelerated, integerDotProduct32BitMixedSignednessAccelerated, integerDotProduct64BitUnsignedAccelerated, integerDotProduct64BitSignedAccelerated, integerDotProduct64BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, integerDotProductAccumulatingSaturating8BitSignedAccelerated, integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, 
integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, integerDotProductAccumulatingSaturating16BitSignedAccelerated, integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, integerDotProductAccumulatingSaturating32BitSignedAccelerated, integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, integerDotProductAccumulatingSaturating64BitSignedAccelerated, integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, storageTexelBufferOffsetAlignmentBytes, storageTexelBufferOffsetSingleTexelAlignment, uniformTexelBufferOffsetAlignmentBytes, uniformTexelBufferOffsetSingleTexelAlignment, maxBufferSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVulkan13Properties const &) const = default; #else bool operator==(PhysicalDeviceVulkan13Properties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minSubgroupSize == rhs.minSubgroupSize) && (maxSubgroupSize == rhs.maxSubgroupSize) && (maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups) && (requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages) && (maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize) && (maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks) && (maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks) && (maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks) && (maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks) && (maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize) && (integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated) && (integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated) && (integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated) && (integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated) && (integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated) && (integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated) && (integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated) && (integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated) && (integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated) && (integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated) && (integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated) && (integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated) && (integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated) && (integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated) && (integerDotProduct64BitMixedSignednessAccelerated == 
rhs.integerDotProduct64BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated) && (integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated) && (integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated) && (integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated) && (integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated) && (integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated) && (integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated) && (integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated) && (integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated) && (storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes) && (storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment) && (uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes) && (uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment) && (maxBufferSize == rhs.maxBufferSize); # endif } bool operator!=(PhysicalDeviceVulkan13Properties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties; void *pNext = {}; uint32_t minSubgroupSize = {}; uint32_t maxSubgroupSize = {}; uint32_t maxComputeWorkgroupSubgroups = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; uint32_t maxInlineUniformBlockSize = {}; uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; uint32_t maxDescriptorSetInlineUniformBlocks = {}; uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; uint32_t maxInlineUniformTotalSize = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 
integerDotProduct8BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties) == sizeof(VkPhysicalDeviceVulkan13Properties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVulkan13Properties; }; struct PhysicalDeviceVulkanMemoryModelFeatures { using 
NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vulkanMemoryModel(vulkanMemoryModel_), vulkanMemoryModelDeviceScope(vulkanMemoryModelDeviceScope_), vulkanMemoryModelAvailabilityVisibilityChains(vulkanMemoryModelAvailabilityVisibilityChains_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(PhysicalDeviceVulkanMemoryModelFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkanMemoryModelFeatures(VkPhysicalDeviceVulkanMemoryModelFeatures const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkanMemoryModelFeatures(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceVulkanMemoryModelFeatures &operator=(PhysicalDeviceVulkanMemoryModelFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceVulkanMemoryModelFeatures &operator=(VkPhysicalDeviceVulkanMemoryModelFeatures const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_) VULKAN_HPP_NOEXCEPT { vulkanMemoryModel = vulkanMemoryModel_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_) VULKAN_HPP_NOEXCEPT { vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PhysicalDeviceVulkanMemoryModelFeatures const &) const = default; #else bool operator==(PhysicalDeviceVulkanMemoryModelFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vulkanMemoryModel == rhs.vulkanMemoryModel) && (vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope) && 
(vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains); # endif } bool operator!=(PhysicalDeviceVulkanMemoryModelFeatures const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures) == sizeof(VkPhysicalDeviceVulkanMemoryModelFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceVulkanMemoryModelFeatures; }; using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), workgroupMemoryExplicitLayout(workgroupMemoryExplicitLayout_), workgroupMemoryExplicitLayoutScalarBlockLayout(workgroupMemoryExplicitLayoutScalarBlockLayout_), workgroupMemoryExplicitLayout8BitAccess(workgroupMemoryExplicitLayout8BitAccess_), workgroupMemoryExplicitLayout16BitAccess(workgroupMemoryExplicitLayout16BitAccess_) { } VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=(VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_) VULKAN_HPP_NOEXCEPT { 
  struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
  {
    using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {},
                                                                                VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {},
                                                                                VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {},
                                                                                VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {},
                                                                                void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , workgroupMemoryExplicitLayout(workgroupMemoryExplicitLayout_)
      , workgroupMemoryExplicitLayoutScalarBlockLayout(workgroupMemoryExplicitLayoutScalarBlockLayout_)
      , workgroupMemoryExplicitLayout8BitAccess(workgroupMemoryExplicitLayout8BitAccess_)
      , workgroupMemoryExplicitLayout16BitAccess(workgroupMemoryExplicitLayout16BitAccess_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(*reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &operator=(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &operator=(VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &setWorkgroupMemoryExplicitLayout(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayoutScalarBlockLayout(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayout8BitAccess(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      setWorkgroupMemoryExplicitLayout16BitAccess(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>(this);
    }

    explicit operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, workgroupMemoryExplicitLayout, workgroupMemoryExplicitLayoutScalarBlockLayout, workgroupMemoryExplicitLayout8BitAccess, workgroupMemoryExplicitLayout16BitAccess);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &) const = default;
#else
    bool operator==(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout) &&
             (workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout) &&
             (workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess) &&
             (workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess);
# endif
    }

    bool operator!=(PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {};
    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {};
    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR) == sizeof(VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
                           "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
  {
    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  };
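  // Illustrative usage sketch (not part of the generated header): a filled-in feature struct
  // like the one above is normally hooked into DeviceCreateInfo::pNext when the logical device
  // is created (the matching extension must also be enabled). `deviceCreateInfo` below is an
  // assumed, already populated vk::DeviceCreateInfo.
  //
  //   vk::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR wmelFeatures;
  //   wmelFeatures.setWorkgroupMemoryExplicitLayout(VK_TRUE)
  //               .setWorkgroupMemoryExplicitLayoutScalarBlockLayout(VK_TRUE);
  //   deviceCreateInfo.setPNext(&wmelFeatures);  // VK_KHR_workgroup_memory_explicit_layout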
  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , ycbcr2plane444Formats(ycbcr2plane444Formats_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(*reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &operator=(PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &operator=(VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &setYcbcr2plane444Formats(VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_) VULKAN_HPP_NOEXCEPT
    {
      ycbcr2plane444Formats = ycbcr2plane444Formats_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>(this);
    }

    explicit operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, ycbcr2plane444Formats);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (ycbcr2plane444Formats == rhs.ycbcr2plane444Formats);
# endif
    }

    bool operator!=(PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT) == sizeof(VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
                           "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
  };

  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
  {
    using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , ycbcrImageArrays(ycbcrImageArrays_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(PhysicalDeviceYcbcrImageArraysFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcrImageArraysFeaturesEXT(VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcrImageArraysFeaturesEXT(*reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceYcbcrImageArraysFeaturesEXT &operator=(PhysicalDeviceYcbcrImageArraysFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcrImageArraysFeaturesEXT &operator=(VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT &setYcbcrImageArrays(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_) VULKAN_HPP_NOEXCEPT
    {
      ycbcrImageArrays = ycbcrImageArrays_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(this);
    }

    explicit operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, ycbcrImageArrays);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceYcbcrImageArraysFeaturesEXT const &) const = default;
#else
    bool operator==(PhysicalDeviceYcbcrImageArraysFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (ycbcrImageArrays == rhs.ycbcrImageArrays);
# endif
    }

    bool operator!=(PhysicalDeviceYcbcrImageArraysFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT) == sizeof(VkPhysicalDeviceYcbcrImageArraysFeaturesEXT), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
                           "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  };

  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
  {
    using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_)
      , shaderZeroInitializeWorkgroupMemory(shaderZeroInitializeWorkgroupMemory_)
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures(*reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &operator=(PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &operator=(VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
      setShaderZeroInitializeWorkgroupMemory(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>(this);
    }

    explicit operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, shaderZeroInitializeWorkgroupMemory);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &) const = default;
#else
    bool operator==(PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory);
# endif
    }

    bool operator!=(PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures) ==
sizeof(VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!"); template<> struct CppType { using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; }; using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; struct PipelineCacheCreateInfo { using NativeType = VkPipelineCacheCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void *pInitialData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), initialDataSize(initialDataSize_), pInitialData(pInitialData_) { } VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(PipelineCacheCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCacheCreateInfo(VkPipelineCacheCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCacheCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &initialData_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , initialDataSize(initialData_.size() * sizeof(T)) , pInitialData(initialData_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCacheCreateInfo &operator=(PipelineCacheCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCacheCreateInfo &operator=(VkPipelineCacheCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &setInitialDataSize(size_t initialDataSize_) VULKAN_HPP_NOEXCEPT { initialDataSize = initialDataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &setPInitialData(const void *pInitialData_) VULKAN_HPP_NOEXCEPT { pInitialData = pInitialData_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template PipelineCacheCreateInfo &setInitialData(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &initialData_) VULKAN_HPP_NOEXCEPT { initialDataSize = initialData_.size() * sizeof(T); pInitialData = initialData_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, 
initialDataSize, pInitialData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCacheCreateInfo const &) const = default; #else bool operator==(PipelineCacheCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (initialDataSize == rhs.initialDataSize) && (pInitialData == rhs.pInitialData); # endif } bool operator!=(PipelineCacheCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; size_t initialDataSize = {}; const void *pInitialData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo) == sizeof(VkPipelineCacheCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCacheCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineCacheCreateInfo; }; struct PipelineCacheHeaderVersionOne { using NativeType = VkPipelineCacheHeaderVersionOne; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne(uint32_t headerSize_ = {}, VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, std::array const &pipelineCacheUUID_ = {}) VULKAN_HPP_NOEXCEPT : headerSize(headerSize_), headerVersion(headerVersion_), vendorID(vendorID_), deviceID(deviceID_), pipelineCacheUUID(pipelineCacheUUID_) { } VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne(PipelineCacheHeaderVersionOne const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCacheHeaderVersionOne(VkPipelineCacheHeaderVersionOne const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCacheHeaderVersionOne(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCacheHeaderVersionOne &operator=(PipelineCacheHeaderVersionOne const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCacheHeaderVersionOne &operator=(VkPipelineCacheHeaderVersionOne const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &setHeaderSize(uint32_t headerSize_) VULKAN_HPP_NOEXCEPT { headerSize = headerSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderVersion(VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_) VULKAN_HPP_NOEXCEPT { headerVersion = headerVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &setVendorID(uint32_t vendorID_) VULKAN_HPP_NOEXCEPT { vendorID = vendorID_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &setDeviceID(uint32_t deviceID_) VULKAN_HPP_NOEXCEPT { deviceID = deviceID_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne &setPipelineCacheUUID(std::array pipelineCacheUUID_) VULKAN_HPP_NOEXCEPT { pipelineCacheUUID = pipelineCacheUUID_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } 
explicit operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCacheHeaderVersionOne const &) const = default; #else bool operator==(PipelineCacheHeaderVersionOne const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (headerSize == rhs.headerSize) && (headerVersion == rhs.headerVersion) && (vendorID == rhs.vendorID) && (deviceID == rhs.deviceID) && (pipelineCacheUUID == rhs.pipelineCacheUUID); # endif } bool operator!=(PipelineCacheHeaderVersionOne const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t headerSize = {}; VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne; uint32_t vendorID = {}; uint32_t deviceID = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne) == sizeof(VkPipelineCacheHeaderVersionOne), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!"); struct PipelineColorBlendAdvancedStateCreateInfoEXT { using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcPremultiplied(srcPremultiplied_), dstPremultiplied(dstPremultiplied_), blendOverlap(blendOverlap_) { } VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(PipelineColorBlendAdvancedStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendAdvancedStateCreateInfoEXT(VkPipelineColorBlendAdvancedStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineColorBlendAdvancedStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineColorBlendAdvancedStateCreateInfoEXT &operator=(PipelineColorBlendAdvancedStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorBlendAdvancedStateCreateInfoEXT &operator=(VkPipelineColorBlendAdvancedStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_) VULKAN_HPP_NOEXCEPT { srcPremultiplied = 
srcPremultiplied_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied(VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_) VULKAN_HPP_NOEXCEPT { dstPremultiplied = dstPremultiplied_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap(VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_) VULKAN_HPP_NOEXCEPT { blendOverlap = blendOverlap_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineColorBlendAdvancedStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineColorBlendAdvancedStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcPremultiplied == rhs.srcPremultiplied) && (dstPremultiplied == rhs.dstPremultiplied) && (blendOverlap == rhs.blendOverlap); # endif } bool operator!=(PipelineColorBlendAdvancedStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT) == sizeof(VkPipelineColorBlendAdvancedStateCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; }; struct PipelineColorWriteCreateInfoEXT { using NativeType = VkPipelineColorWriteCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Bool32 *pColorWriteEnables_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), attachmentCount(attachmentCount_), pColorWriteEnables(pColorWriteEnables_) { } VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(PipelineColorWriteCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorWriteCreateInfoEXT(VkPipelineColorWriteCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineColorWriteCreateInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineColorWriteCreateInfoEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const &colorWriteEnables_, const void *pNext_ = nullptr) : pNext(pNext_) , attachmentCount(static_cast(colorWriteEnables_.size())) , pColorWriteEnables(colorWriteEnables_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineColorWriteCreateInfoEXT &operator=(PipelineColorWriteCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineColorWriteCreateInfoEXT &operator=(VkPipelineColorWriteCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &setAttachmentCount(uint32_t attachmentCount_) VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPColorWriteEnables(const VULKAN_HPP_NAMESPACE::Bool32 *pColorWriteEnables_) VULKAN_HPP_NOEXCEPT { pColorWriteEnables = pColorWriteEnables_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineColorWriteCreateInfoEXT & setColorWriteEnables(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorWriteEnables_) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast(colorWriteEnables_.size()); pColorWriteEnables = colorWriteEnables_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, attachmentCount, pColorWriteEnables); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineColorWriteCreateInfoEXT const &) const = default; #else bool operator==(PipelineColorWriteCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (attachmentCount == rhs.attachmentCount) && (pColorWriteEnables == rhs.pColorWriteEnables); # endif } bool operator!=(PipelineColorWriteCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT; const void *pNext = {}; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::Bool32 *pColorWriteEnables = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT) == sizeof(VkPipelineColorWriteCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineColorWriteCreateInfoEXT; }; struct PipelineCompilerControlCreateInfoAMD { using NativeType = VkPipelineCompilerControlCreateInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD; #if 
!defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), compilerControlFlags(compilerControlFlags_) { } VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(PipelineCompilerControlCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCompilerControlCreateInfoAMD(VkPipelineCompilerControlCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCompilerControlCreateInfoAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCompilerControlCreateInfoAMD &operator=(PipelineCompilerControlCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCompilerControlCreateInfoAMD &operator=(VkPipelineCompilerControlCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_) VULKAN_HPP_NOEXCEPT { compilerControlFlags = compilerControlFlags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, compilerControlFlags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCompilerControlCreateInfoAMD const &) const = default; #else bool operator==(PipelineCompilerControlCreateInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (compilerControlFlags == rhs.compilerControlFlags); # endif } bool operator!=(PipelineCompilerControlCreateInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD) == sizeof(VkPipelineCompilerControlCreateInfoAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineCompilerControlCreateInfoAMD; }; struct PipelineCoverageModulationStateCreateInfoNV { using NativeType = VkPipelineCoverageModulationStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float *pCoverageModulationTable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), coverageModulationMode(coverageModulationMode_), coverageModulationTableEnable(coverageModulationTableEnable_), coverageModulationTableCount(coverageModulationTableCount_), pCoverageModulationTable(pCoverageModulationTable_) { } VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(PipelineCoverageModulationStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageModulationStateCreateInfoNV(VkPipelineCoverageModulationStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCoverageModulationStateCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &coverageModulationTable_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , coverageModulationMode(coverageModulationMode_) , coverageModulationTableEnable(coverageModulationTableEnable_) , coverageModulationTableCount(static_cast(coverageModulationTable_.size())) , pCoverageModulationTable(coverageModulationTable_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCoverageModulationStateCreateInfoNV &operator=(PipelineCoverageModulationStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageModulationStateCreateInfoNV &operator=(VkPipelineCoverageModulationStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setFlags(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_) VULKAN_HPP_NOEXCEPT { coverageModulationMode = coverageModulationMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_) VULKAN_HPP_NOEXCEPT { coverageModulationTableEnable = coverageModulationTableEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount(uint32_t coverageModulationTableCount_) VULKAN_HPP_NOEXCEPT { coverageModulationTableCount = coverageModulationTableCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable(const float *pCoverageModulationTable_) VULKAN_HPP_NOEXCEPT { pCoverageModulationTable = 
pCoverageModulationTable_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &coverageModulationTable_) VULKAN_HPP_NOEXCEPT { coverageModulationTableCount = static_cast(coverageModulationTable_.size()); pCoverageModulationTable = coverageModulationTable_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCoverageModulationStateCreateInfoNV const &) const = default; #else bool operator==(PipelineCoverageModulationStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (coverageModulationMode == rhs.coverageModulationMode) && (coverageModulationTableEnable == rhs.coverageModulationTableEnable) && (coverageModulationTableCount == rhs.coverageModulationTableCount) && (pCoverageModulationTable == rhs.pCoverageModulationTable); # endif } bool operator!=(PipelineCoverageModulationStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; uint32_t coverageModulationTableCount = {}; const float *pCoverageModulationTable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV) == sizeof(VkPipelineCoverageModulationStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineCoverageModulationStateCreateInfoNV; }; struct PipelineCoverageReductionStateCreateInfoNV { using NativeType = VkPipelineCoverageReductionStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, const void *pNext_ = nullptr) 
VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), coverageReductionMode(coverageReductionMode_) { } VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(PipelineCoverageReductionStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageReductionStateCreateInfoNV(VkPipelineCoverageReductionStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCoverageReductionStateCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCoverageReductionStateCreateInfoNV &operator=(PipelineCoverageReductionStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageReductionStateCreateInfoNV &operator=(VkPipelineCoverageReductionStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setFlags(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_) VULKAN_HPP_NOEXCEPT { coverageReductionMode = coverageReductionMode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, coverageReductionMode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCoverageReductionStateCreateInfoNV const &) const = default; #else bool operator==(PipelineCoverageReductionStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (coverageReductionMode == rhs.coverageReductionMode); # endif } bool operator!=(PipelineCoverageReductionStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV) == sizeof(VkPipelineCoverageReductionStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineCoverageReductionStateCreateInfoNV; }; struct PipelineCoverageToColorStateCreateInfoNV { using NativeType = 
VkPipelineCoverageToColorStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), coverageToColorEnable(coverageToColorEnable_), coverageToColorLocation(coverageToColorLocation_) { } VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(PipelineCoverageToColorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageToColorStateCreateInfoNV(VkPipelineCoverageToColorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCoverageToColorStateCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCoverageToColorStateCreateInfoNV &operator=(PipelineCoverageToColorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCoverageToColorStateCreateInfoNV &operator=(VkPipelineCoverageToColorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setFlags(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_) VULKAN_HPP_NOEXCEPT { coverageToColorEnable = coverageToColorEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &setCoverageToColorLocation(uint32_t coverageToColorLocation_) VULKAN_HPP_NOEXCEPT { coverageToColorLocation = coverageToColorLocation_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, coverageToColorEnable, coverageToColorLocation); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCoverageToColorStateCreateInfoNV const &) const = default; #else bool operator==(PipelineCoverageToColorStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (coverageToColorEnable == rhs.coverageToColorEnable) && (coverageToColorLocation == rhs.coverageToColorLocation); # endif } bool operator!=(PipelineCoverageToColorStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; const void *pNext = {}; 
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; uint32_t coverageToColorLocation = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV) == sizeof(VkPipelineCoverageToColorStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineCoverageToColorStateCreateInfoNV; }; struct PipelineCreationFeedback { using NativeType = VkPipelineCreationFeedback; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineCreationFeedback(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT : flags(flags_), duration(duration_) { } VULKAN_HPP_CONSTEXPR PipelineCreationFeedback(PipelineCreationFeedback const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCreationFeedback(VkPipelineCreationFeedback const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCreationFeedback(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCreationFeedback &operator=(PipelineCreationFeedback const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCreationFeedback &operator=(VkPipelineCreationFeedback const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(flags, duration); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCreationFeedback const &) const = default; #else bool operator==(PipelineCreationFeedback const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (flags == rhs.flags) && (duration == rhs.duration); # endif } bool operator!=(PipelineCreationFeedback const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {}; uint64_t duration = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback) == sizeof(VkPipelineCreationFeedback), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCreationFeedback is not nothrow_move_constructible!"); using PipelineCreationFeedbackEXT = PipelineCreationFeedback; struct PipelineCreationFeedbackCreateInfo { using NativeType = VkPipelineCreationFeedbackCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, 
VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineStageCreationFeedbacks_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pPipelineCreationFeedback(pPipelineCreationFeedback_), pipelineStageCreationFeedbackCount(pipelineStageCreationFeedbackCount_), pPipelineStageCreationFeedbacks(pPipelineStageCreationFeedbacks_) { } VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo(PipelineCreationFeedbackCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCreationFeedbackCreateInfo(VkPipelineCreationFeedbackCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineCreationFeedbackCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pipelineStageCreationFeedbacks_, const void *pNext_ = nullptr) : pNext(pNext_) , pPipelineCreationFeedback(pPipelineCreationFeedback_) , pipelineStageCreationFeedbackCount(static_cast(pipelineStageCreationFeedbacks_.size())) , pPipelineStageCreationFeedbacks(pipelineStageCreationFeedbacks_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineCreationFeedbackCreateInfo &operator=(PipelineCreationFeedbackCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineCreationFeedbackCreateInfo &operator=(VkPipelineCreationFeedbackCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineCreationFeedback(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineCreationFeedback_) VULKAN_HPP_NOEXCEPT { pPipelineCreationFeedback = pPipelineCreationFeedback_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbackCount(uint32_t pipelineStageCreationFeedbackCount_) VULKAN_HPP_NOEXCEPT { pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPPipelineStageCreationFeedbacks(VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineStageCreationFeedbacks_) VULKAN_HPP_NOEXCEPT { pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineCreationFeedbackCreateInfo &setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &pipelineStageCreationFeedbacks_) VULKAN_HPP_NOEXCEPT { pipelineStageCreationFeedbackCount = static_cast(pipelineStageCreationFeedbacks_.size()); pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks); } #endif #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineCreationFeedbackCreateInfo const &) const = default; #else bool operator==(PipelineCreationFeedbackCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pPipelineCreationFeedback == rhs.pPipelineCreationFeedback) && (pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount) && (pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks); # endif } bool operator!=(PipelineCreationFeedbackCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineCreationFeedback = {}; uint32_t pipelineStageCreationFeedbackCount = {}; VULKAN_HPP_NAMESPACE::PipelineCreationFeedback *pPipelineStageCreationFeedbacks = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo) == sizeof(VkPipelineCreationFeedbackCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineCreationFeedbackCreateInfo; }; using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; struct PipelineDiscardRectangleStateCreateInfoEXT { using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D *pDiscardRectangles_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), discardRectangleMode(discardRectangleMode_), discardRectangleCount(discardRectangleCount_), pDiscardRectangles(pDiscardRectangles_) { } VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(PipelineDiscardRectangleStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineDiscardRectangleStateCreateInfoEXT(VkPipelineDiscardRectangleStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineDiscardRectangleStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &discardRectangles_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , discardRectangleMode(discardRectangleMode_) , discardRectangleCount(static_cast(discardRectangles_.size())) , pDiscardRectangles(discardRectangles_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ 
    PipelineDiscardRectangleStateCreateInfoEXT &operator=(PipelineDiscardRectangleStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;
    PipelineDiscardRectangleStateCreateInfoEXT &operator=(VkPipelineDiscardRectangleStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>(&rhs);
      return *this;
    }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &setDiscardRectangleMode(VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_) VULKAN_HPP_NOEXCEPT { discardRectangleMode = discardRectangleMode_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &setDiscardRectangleCount(uint32_t discardRectangleCount_) VULKAN_HPP_NOEXCEPT { discardRectangleCount = discardRectangleCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &setPDiscardRectangles(const VULKAN_HPP_NAMESPACE::Rect2D *pDiscardRectangles_) VULKAN_HPP_NOEXCEPT { pDiscardRectangles = pDiscardRectangles_; return *this; }
# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineDiscardRectangleStateCreateInfoEXT &setDiscardRectangles(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const &discardRectangles_) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = static_cast<uint32_t>(discardRectangles_.size());
      pDiscardRectangles = discardRectangles_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>(this); }
    explicit operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles);
    }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PipelineDiscardRectangleStateCreateInfoEXT const &) const = default;
#else
    bool operator==(PipelineDiscardRectangleStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (discardRectangleMode == rhs.discardRectangleMode) && (discardRectangleCount == rhs.discardRectangleCount) && (pDiscardRectangles == rhs.pDiscardRectangles);
# endif
    }
    bool operator!=(PipelineDiscardRectangleStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
    uint32_t discardRectangleCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D *pDiscardRectangles = {};
  };
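  // Illustrative usage sketch (not part of the generated registry output): with the default
  // VULKAN_HPP_NAMESPACE (vk) and enhanced mode enabled, the ArrayProxyNoTemporaries constructor
  // above derives discardRectangleCount / pDiscardRectangles from a container, assuming a
  // graphics pipeline create-info chain is being assembled elsewhere:
  //
  //   std::array<vk::Rect2D, 2> rects = { vk::Rect2D{ { 0, 0 }, { 256, 256 } },
  //                                       vk::Rect2D{ { 256, 0 }, { 256, 256 } } };
  //   vk::PipelineDiscardRectangleStateCreateInfoEXT discardState(
  //     {}, vk::DiscardRectangleModeEXT::eInclusive, rects );
  //   // chain discardState into VkGraphicsPipelineCreateInfo::pNext (VK_EXT_discard_rectangles)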
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT) == sizeof(VkPipelineDiscardRectangleStateCreateInfoEXT), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
  {
    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
  };

  struct PipelineExecutableInfoKHR
  {
    using NativeType = VkPipelineExecutableInfoKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), pipeline(pipeline_), executableIndex(executableIndex_)
    {
    }
    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(PipelineExecutableInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;
    PipelineExecutableInfoKHR(VkPipelineExecutableInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineExecutableInfoKHR(*reinterpret_cast<PipelineExecutableInfoKHR const *>(&rhs)) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineExecutableInfoKHR &operator=(PipelineExecutableInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;
    PipelineExecutableInfoKHR &operator=(VkPipelineExecutableInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>(&rhs);
      return *this;
    }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR &setPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline_) VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR &setExecutableIndex(uint32_t executableIndex_) VULKAN_HPP_NOEXCEPT { executableIndex = executableIndex_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPipelineExecutableInfoKHR *>(this); }
    explicit operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPipelineExecutableInfoKHR *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, pipeline, executableIndex);
    }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PipelineExecutableInfoKHR const &) const = default;
#else
    bool operator==(PipelineExecutableInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipeline == rhs.pipeline) && (executableIndex == rhs.executableIndex);
# endif
    }
    bool operator!=(PipelineExecutableInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
    uint32_t executableIndex = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR) == sizeof(VkPipelineExecutableInfoKHR), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineExecutableInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineExecutableInfoKHR; }; struct PipelineExecutableInternalRepresentationKHR { using NativeType = VkPipelineExecutableInternalRepresentationKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array const &name_ = {}, std::array const &description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void *pData_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), name(name_), description(description_), isText(isText_), dataSize(dataSize_), pData(pData_) { } VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(PipelineExecutableInternalRepresentationKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineExecutableInternalRepresentationKHR(VkPipelineExecutableInternalRepresentationKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineExecutableInternalRepresentationKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template PipelineExecutableInternalRepresentationKHR(std::array const &name_, std::array const &description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_, void *pNext_ = nullptr) : pNext(pNext_) , name(name_) , description(description_) , isText(isText_) , dataSize(data_.size() * sizeof(T)) , pData(data_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineExecutableInternalRepresentationKHR &operator=(PipelineExecutableInternalRepresentationKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineExecutableInternalRepresentationKHR &operator=(VkPipelineExecutableInternalRepresentationKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::Bool32 const &, size_t const &, void *const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, name, description, isText, dataSize, pData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineExecutableInternalRepresentationKHR const &) const = default; #else bool operator==(PipelineExecutableInternalRepresentationKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (name == rhs.name) && (description == rhs.description) && (isText == rhs.isText) && (dataSize == rhs.dataSize) && (pData == rhs.pData); # endif } bool operator!=(PipelineExecutableInternalRepresentationKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineExecutableInternalRepresentationKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; VULKAN_HPP_NAMESPACE::Bool32 isText = {}; size_t dataSize = {}; void *pData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR) == sizeof(VkPipelineExecutableInternalRepresentationKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineExecutableInternalRepresentationKHR; }; struct PipelineExecutablePropertiesKHR { using NativeType = VkPipelineExecutablePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array const &name_ = {}, std::array const &description_ = {}, uint32_t subgroupSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stages(stages_), name(name_), description(description_), subgroupSize(subgroupSize_) { } VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(PipelineExecutablePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineExecutablePropertiesKHR(VkPipelineExecutablePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineExecutablePropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineExecutablePropertiesKHR &operator=(PipelineExecutablePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineExecutablePropertiesKHR &operator=(VkPipelineExecutablePropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stages, name, description, subgroupSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineExecutablePropertiesKHR const &) const = default; #else bool operator==(PipelineExecutablePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (stages == rhs.stages) && (name == rhs.name) && (description == rhs.description) && (subgroupSize == rhs.subgroupSize); # endif } bool operator!=(PipelineExecutablePropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; uint32_t subgroupSize = {}; }; 
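  // Illustrative usage sketch (not part of the generated registry output): these properties are
  // returned by vkGetPipelineExecutablePropertiesKHR (VK_KHR_pipeline_executable_properties).
  // Assuming the extension is enabled, default dispatch, and exceptions enabled, the
  // enhanced-mode wrappers might be used roughly like this:
  //
  //   vk::PipelineInfoKHR pipelineInfo( pipeline );
  //   auto executables = device.getPipelineExecutablePropertiesKHR( pipelineInfo );
  //   for ( uint32_t i = 0; i < executables.size(); ++i )
  //   {
  //     // executables[i].name / .description are fixed-size char arrays (ArrayWrapper1D)
  //     vk::PipelineExecutableInfoKHR executableInfo( pipeline, i );
  //     auto statistics = device.getPipelineExecutableStatisticsKHR( executableInfo );
  //   }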
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR) == sizeof(VkPipelineExecutablePropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineExecutablePropertiesKHR; }; union PipelineExecutableStatisticValueKHR { using NativeType = VkPipelineExecutableStatisticValueKHR; #if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR(VULKAN_HPP_NAMESPACE::Bool32 b32_ = {}) : b32(b32_) {} VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR(int64_t i64_) : i64(i64_) {} VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR(uint64_t u64_) : u64(u64_) {} VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR(double f64_) : f64(f64_) {} #endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ #if !defined(VULKAN_HPP_NO_UNION_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR &setB32(VULKAN_HPP_NAMESPACE::Bool32 b32_) VULKAN_HPP_NOEXCEPT { b32 = b32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR &setI64(int64_t i64_) VULKAN_HPP_NOEXCEPT { i64 = i64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR &setU64(uint64_t u64_) VULKAN_HPP_NOEXCEPT { u64 = u64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR &setF64(double f64_) VULKAN_HPP_NOEXCEPT { f64 = f64_; return *this; } #endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkPipelineExecutableStatisticValueKHR const &() const { return *reinterpret_cast(this); } operator VkPipelineExecutableStatisticValueKHR &() { return *reinterpret_cast(this); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS VULKAN_HPP_NAMESPACE::Bool32 b32; int64_t i64; uint64_t u64; double f64; #else VkBool32 b32; int64_t i64; uint64_t u64; double f64; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; struct PipelineExecutableStatisticKHR { using NativeType = VkPipelineExecutableStatisticKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( std::array const &name_ = {}, std::array const &description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), name(name_), description(description_), format(format_), value(value_) { } VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR(PipelineExecutableStatisticKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineExecutableStatisticKHR(VkPipelineExecutableStatisticKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineExecutableStatisticKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineExecutableStatisticKHR &operator=(PipelineExecutableStatisticKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineExecutableStatisticKHR &operator=(VkPipelineExecutableStatisticKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPipelineExecutableStatisticKHR const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, name, description, format, value); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR) == sizeof(VkPipelineExecutableStatisticKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineExecutableStatisticKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineExecutableStatisticKHR; }; struct PipelineFragmentShadingRateEnumStateCreateInfoNV { using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, std::array const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shadingRateType(shadingRateType_), shadingRate(shadingRate_), combinerOps(combinerOps_) { } VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(PipelineFragmentShadingRateEnumStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineFragmentShadingRateEnumStateCreateInfoNV(VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineFragmentShadingRateEnumStateCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineFragmentShadingRateEnumStateCreateInfoNV &operator=(PipelineFragmentShadingRateEnumStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineFragmentShadingRateEnumStateCreateInfoNV &operator=(VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & 
setShadingRateType(VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_) VULKAN_HPP_NOEXCEPT { shadingRateType = shadingRateType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRate(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_) VULKAN_HPP_NOEXCEPT { shadingRate = shadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps(std::array combinerOps_) VULKAN_HPP_NOEXCEPT { combinerOps = combinerOps_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shadingRateType, shadingRate, combinerOps); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineFragmentShadingRateEnumStateCreateInfoNV const &) const = default; #else bool operator==(PipelineFragmentShadingRateEnumStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shadingRateType == rhs.shadingRateType) && (shadingRate == rhs.shadingRate) && (combinerOps == rhs.combinerOps); # endif } bool operator!=(PipelineFragmentShadingRateEnumStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV) == sizeof(VkPipelineFragmentShadingRateEnumStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; }; struct PipelineFragmentShadingRateStateCreateInfoKHR { using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, std::array const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentSize(fragmentSize_), combinerOps(combinerOps_) { } VULKAN_HPP_CONSTEXPR_14 
PipelineFragmentShadingRateStateCreateInfoKHR(PipelineFragmentShadingRateStateCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineFragmentShadingRateStateCreateInfoKHR(VkPipelineFragmentShadingRateStateCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineFragmentShadingRateStateCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineFragmentShadingRateStateCreateInfoKHR &operator=(PipelineFragmentShadingRateStateCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineFragmentShadingRateStateCreateInfoKHR &operator=(VkPipelineFragmentShadingRateStateCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize(VULKAN_HPP_NAMESPACE::Extent2D const &fragmentSize_) VULKAN_HPP_NOEXCEPT { fragmentSize = fragmentSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps(std::array combinerOps_) VULKAN_HPP_NOEXCEPT { combinerOps = combinerOps_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentSize, combinerOps); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineFragmentShadingRateStateCreateInfoKHR const &) const = default; #else bool operator==(PipelineFragmentShadingRateStateCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentSize == rhs.fragmentSize) && (combinerOps == rhs.combinerOps); # endif } bool operator!=(PipelineFragmentShadingRateStateCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR) == sizeof(VkPipelineFragmentShadingRateStateCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineFragmentShadingRateStateCreateInfoKHR; }; struct PipelineInfoKHR { using NativeType = VkPipelineInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = 
{}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pipeline(pipeline_) { } VULKAN_HPP_CONSTEXPR PipelineInfoKHR(PipelineInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineInfoKHR(VkPipelineInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineInfoKHR &operator=(PipelineInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineInfoKHR &operator=(VkPipelineInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR &setPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline_) VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pipeline); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineInfoKHR const &) const = default; #else bool operator==(PipelineInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pipeline == rhs.pipeline); # endif } bool operator!=(PipelineInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineInfoKHR) == sizeof(VkPipelineInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineInfoKHR; }; struct PushConstantRange { using NativeType = VkPushConstantRange; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT : stageFlags(stageFlags_), offset(offset_), size(size_) { } VULKAN_HPP_CONSTEXPR PushConstantRange(PushConstantRange const &rhs) VULKAN_HPP_NOEXCEPT = default; PushConstantRange(VkPushConstantRange const &rhs) VULKAN_HPP_NOEXCEPT : PushConstantRange(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PushConstantRange &operator=(PushConstantRange const &rhs) VULKAN_HPP_NOEXCEPT = default; PushConstantRange &operator=(VkPushConstantRange const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PushConstantRange &setStageFlags(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_) VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PushConstantRange &setOffset(uint32_t offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } 
    VULKAN_HPP_CONSTEXPR_14 PushConstantRange &setSize(uint32_t size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPushConstantRange *>(this); }
    explicit operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPushConstantRange *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(stageFlags, offset, size);
    }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PushConstantRange const &) const = default;
#else
    bool operator==(PushConstantRange const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (stageFlags == rhs.stageFlags) && (offset == rhs.offset) && (size == rhs.size);
# endif
    }
    bool operator!=(PushConstantRange const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
    uint32_t offset = {};
    uint32_t size = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PushConstantRange) == sizeof(VkPushConstantRange), "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "PushConstantRange is not nothrow_move_constructible!");

  struct PipelineLayoutCreateInfo
  {
    using NativeType = VkPipelineLayoutCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
                                                  uint32_t setLayoutCount_ = {},
                                                  const VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayouts_ = {},
                                                  uint32_t pushConstantRangeCount_ = {},
                                                  const VULKAN_HPP_NAMESPACE::PushConstantRange *pPushConstantRanges_ = {},
                                                  const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), flags(flags_), setLayoutCount(setLayoutCount_), pSetLayouts(pSetLayouts_), pushConstantRangeCount(pushConstantRangeCount_), pPushConstantRanges(pPushConstantRanges_)
    {
    }
    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(PipelineLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;
    PipelineLayoutCreateInfo(VkPipelineLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineLayoutCreateInfo(*reinterpret_cast<PipelineLayoutCreateInfo const *>(&rhs)) {}
# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_,
                             VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &setLayouts_,
                             VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const &pushConstantRanges_ = {},
                             const void *pNext_ = nullptr)
      : pNext(pNext_)
      , flags(flags_)
      , setLayoutCount(static_cast<uint32_t>(setLayouts_.size()))
      , pSetLayouts(setLayouts_.data())
      , pushConstantRangeCount(static_cast<uint32_t>(pushConstantRanges_.size()))
      , pPushConstantRanges(pushConstantRanges_.data())
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineLayoutCreateInfo &operator=(PipelineLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default;
    PipelineLayoutCreateInfo &operator=(VkPipelineLayoutCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>(&rhs);
      return *this;
    }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &setSetLayoutCount(uint32_t setLayoutCount_) VULKAN_HPP_NOEXCEPT { setLayoutCount = setLayoutCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &setPSetLayouts(const VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayouts_) VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; }
# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineLayoutCreateInfo &setSetLayouts(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &setLayouts_) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = static_cast<uint32_t>(setLayouts_.size());
      pSetLayouts = setLayouts_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &setPushConstantRangeCount(uint32_t pushConstantRangeCount_) VULKAN_HPP_NOEXCEPT { pushConstantRangeCount = pushConstantRangeCount_; return *this; }
    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &setPPushConstantRanges(const VULKAN_HPP_NAMESPACE::PushConstantRange *pPushConstantRanges_) VULKAN_HPP_NOEXCEPT { pPushConstantRanges = pPushConstantRanges_; return *this; }
# if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineLayoutCreateInfo &setPushConstantRanges(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const &pushConstantRanges_) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = static_cast<uint32_t>(pushConstantRanges_.size());
      pPushConstantRanges = pushConstantRanges_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
    explicit operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPipelineLayoutCreateInfo *>(this); }
    explicit operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPipelineLayoutCreateInfo *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges);
    }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PipelineLayoutCreateInfo const &) const = default;
#else
    bool operator==(PipelineLayoutCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (setLayoutCount == rhs.setLayoutCount) && (pSetLayouts == rhs.pSetLayouts) && (pushConstantRangeCount == rhs.pushConstantRangeCount) && (pPushConstantRanges == rhs.pPushConstantRanges);
# endif
    }
    bool operator!=(PipelineLayoutCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
    uint32_t setLayoutCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayouts = {};
    uint32_t pushConstantRangeCount = {};
    const VULKAN_HPP_NAMESPACE::PushConstantRange *pPushConstantRanges = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo) == sizeof(VkPipelineLayoutCreateInfo), "struct and wrapper have different size!");
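  // Illustrative usage sketch (not part of the generated registry output): the enhanced-mode
  // constructor above fills setLayoutCount / pushConstantRangeCount from the proxies, so single
  // objects or containers can be passed directly (assuming 'device' and 'descriptorSetLayout'
  // were created earlier):
  //
  //   vk::PushConstantRange pushRange( vk::ShaderStageFlagBits::eVertex, 0, sizeof( float ) * 16 );
  //   vk::PipelineLayoutCreateInfo layoutInfo( {}, descriptorSetLayout, pushRange );
  //   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( layoutInfo );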
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineLayoutCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineLayoutCreateInfo; }; struct PipelineLibraryCreateInfoKHR { using NativeType = VkPipelineLibraryCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline *pLibraries_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), libraryCount(libraryCount_), pLibraries(pLibraries_) { } VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(PipelineLibraryCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineLibraryCreateInfoKHR(VkPipelineLibraryCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PipelineLibraryCreateInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineLibraryCreateInfoKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &libraries_, const void *pNext_ = nullptr) : pNext(pNext_) , libraryCount(static_cast(libraries_.size())) , pLibraries(libraries_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineLibraryCreateInfoKHR &operator=(PipelineLibraryCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineLibraryCreateInfoKHR &operator=(VkPipelineLibraryCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR &setLibraryCount(uint32_t libraryCount_) VULKAN_HPP_NOEXCEPT { libraryCount = libraryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR &setPLibraries(const VULKAN_HPP_NAMESPACE::Pipeline *pLibraries_) VULKAN_HPP_NOEXCEPT { pLibraries = pLibraries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineLibraryCreateInfoKHR & setLibraries(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &libraries_) VULKAN_HPP_NOEXCEPT { libraryCount = static_cast(libraries_.size()); pLibraries = libraries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, libraryCount, pLibraries); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineLibraryCreateInfoKHR const &) const = default; #else bool operator==(PipelineLibraryCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (libraryCount == rhs.libraryCount) && (pLibraries == rhs.pLibraries); # endif } bool operator!=(PipelineLibraryCreateInfoKHR 
const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; const void *pNext = {}; uint32_t libraryCount = {}; const VULKAN_HPP_NAMESPACE::Pipeline *pLibraries = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR) == sizeof(VkPipelineLibraryCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineLibraryCreateInfoKHR; }; struct PipelineRasterizationConservativeStateCreateInfoEXT { using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), conservativeRasterizationMode(conservativeRasterizationMode_), extraPrimitiveOverestimationSize(extraPrimitiveOverestimationSize_) { } VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(PipelineRasterizationConservativeStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationConservativeStateCreateInfoEXT(VkPipelineRasterizationConservativeStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRasterizationConservativeStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRasterizationConservativeStateCreateInfoEXT & operator=(PipelineRasterizationConservativeStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationConservativeStateCreateInfoEXT &operator=(VkPipelineRasterizationConservativeStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setFlags(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_) VULKAN_HPP_NOEXCEPT { conservativeRasterizationMode = conservativeRasterizationMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize(float extraPrimitiveOverestimationSize_) VULKAN_HPP_NOEXCEPT { extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator 
VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRasterizationConservativeStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineRasterizationConservativeStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (conservativeRasterizationMode == rhs.conservativeRasterizationMode) && (extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize); # endif } bool operator!=(PipelineRasterizationConservativeStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; float extraPrimitiveOverestimationSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT) == sizeof(VkPipelineRasterizationConservativeStateCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRasterizationConservativeStateCreateInfoEXT; }; struct PipelineRasterizationDepthClipStateCreateInfoEXT { using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), depthClipEnable(depthClipEnable_) { } VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(PipelineRasterizationDepthClipStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationDepthClipStateCreateInfoEXT(VkPipelineRasterizationDepthClipStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRasterizationDepthClipStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRasterizationDepthClipStateCreateInfoEXT &operator=(PipelineRasterizationDepthClipStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationDepthClipStateCreateInfoEXT 
&operator=(VkPipelineRasterizationDepthClipStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_) VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, depthClipEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRasterizationDepthClipStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineRasterizationDepthClipStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (depthClipEnable == rhs.depthClipEnable); # endif } bool operator!=(PipelineRasterizationDepthClipStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT) == sizeof(VkPipelineRasterizationDepthClipStateCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; }; struct PipelineRasterizationLineStateCreateInfoEXT { using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), lineRasterizationMode(lineRasterizationMode_), stippledLineEnable(stippledLineEnable_), 
lineStippleFactor(lineStippleFactor_), lineStipplePattern(lineStipplePattern_) { } VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(PipelineRasterizationLineStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationLineStateCreateInfoEXT(VkPipelineRasterizationLineStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRasterizationLineStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRasterizationLineStateCreateInfoEXT &operator=(PipelineRasterizationLineStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationLineStateCreateInfoEXT &operator=(VkPipelineRasterizationLineStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_) VULKAN_HPP_NOEXCEPT { lineRasterizationMode = lineRasterizationMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_) VULKAN_HPP_NOEXCEPT { stippledLineEnable = stippledLineEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &setLineStippleFactor(uint32_t lineStippleFactor_) VULKAN_HPP_NOEXCEPT { lineStippleFactor = lineStippleFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &setLineStipplePattern(uint16_t lineStipplePattern_) VULKAN_HPP_NOEXCEPT { lineStipplePattern = lineStipplePattern_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRasterizationLineStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineRasterizationLineStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (lineRasterizationMode == rhs.lineRasterizationMode) && (stippledLineEnable == rhs.stippledLineEnable) && (lineStippleFactor == rhs.lineStippleFactor) && (lineStipplePattern == rhs.lineStipplePattern); # endif } bool operator!=(PipelineRasterizationLineStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; uint32_t lineStippleFactor = {}; uint16_t lineStipplePattern = {}; }; 
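  // Illustrative usage sketch (not part of the generated registry output): the chained setters
  // above can describe stippled line rasterization (VK_EXT_line_rasterization), e.g. a dashed
  // pattern where every other 4-pixel segment is lit:
  //
  //   vk::PipelineRasterizationLineStateCreateInfoEXT lineState;
  //   lineState.setLineRasterizationMode( vk::LineRasterizationModeEXT::eBresenham )
  //            .setStippledLineEnable( VK_TRUE )
  //            .setLineStippleFactor( 4 )
  //            .setLineStipplePattern( 0x5555 );
  //   // chain lineState into VkPipelineRasterizationStateCreateInfo::pNext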
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT) == sizeof(VkPipelineRasterizationLineStateCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRasterizationLineStateCreateInfoEXT; }; struct PipelineRasterizationProvokingVertexStateCreateInfoEXT { using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), provokingVertexMode(provokingVertexMode_) { } VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(PipelineRasterizationProvokingVertexStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationProvokingVertexStateCreateInfoEXT(VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRasterizationProvokingVertexStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=(PipelineRasterizationProvokingVertexStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationProvokingVertexStateCreateInfoEXT & operator=(VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setProvokingVertexMode(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_) VULKAN_HPP_NOEXCEPT { provokingVertexMode = provokingVertexMode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, provokingVertexMode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRasterizationProvokingVertexStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineRasterizationProvokingVertexStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (provokingVertexMode == rhs.provokingVertexMode); # endif } bool 
    operator!=(PipelineRasterizationProvokingVertexStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
    const void *                                 pNext               = {};
    VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
  };

  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT) ==
                             sizeof(VkPipelineRasterizationProvokingVertexStateCreateInfoEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
                           "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
  };

  struct PipelineRasterizationStateRasterizationOrderAMD
  {
    using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
      VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict,
      const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), rasterizationOrder(rasterizationOrder_)
    {
    }

    VULKAN_HPP_CONSTEXPR
      PipelineRasterizationStateRasterizationOrderAMD(PipelineRasterizationStateRasterizationOrderAMD const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateRasterizationOrderAMD(VkPipelineRasterizationStateRasterizationOrderAMD const &rhs) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateRasterizationOrderAMD(*reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineRasterizationStateRasterizationOrderAMD &operator=(PipelineRasterizationStateRasterizationOrderAMD const &rhs) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateRasterizationOrderAMD &operator=(VkPipelineRasterizationStateRasterizationOrderAMD const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>(&rhs);
      return *this;
    }

#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
      setRasterizationOrder(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrder = rasterizationOrder_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>(this);
    }

    explicit operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, rasterizationOrder);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(PipelineRasterizationStateRasterizationOrderAMD const &) const = default;
#else
    bool
operator==(PipelineRasterizationStateRasterizationOrderAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (rasterizationOrder == rhs.rasterizationOrder); # endif } bool operator!=(PipelineRasterizationStateRasterizationOrderAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD) == sizeof(VkPipelineRasterizationStateRasterizationOrderAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRasterizationStateRasterizationOrderAMD; }; struct PipelineRasterizationStateStreamCreateInfoEXT { using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), rasterizationStream(rasterizationStream_) { } VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(PipelineRasterizationStateStreamCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateStreamCreateInfoEXT(VkPipelineRasterizationStateStreamCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRasterizationStateStreamCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRasterizationStateStreamCreateInfoEXT &operator=(PipelineRasterizationStateStreamCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateStreamCreateInfoEXT &operator=(VkPipelineRasterizationStateStreamCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setFlags(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &setRasterizationStream(uint32_t rasterizationStream_) VULKAN_HPP_NOEXCEPT { rasterizationStream = rasterizationStream_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, rasterizationStream); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRasterizationStateStreamCreateInfoEXT const &) const = default; #else bool operator==(PipelineRasterizationStateStreamCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (rasterizationStream == rhs.rasterizationStream); # endif } bool operator!=(PipelineRasterizationStateStreamCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; uint32_t rasterizationStream = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT) == sizeof(VkPipelineRasterizationStateStreamCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRasterizationStateStreamCreateInfoEXT; }; struct PipelineRenderingCreateInfo { using NativeType = VkPipelineRenderingCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo(uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Format *pColorAttachmentFormats_ = {}, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), viewMask(viewMask_), colorAttachmentCount(colorAttachmentCount_), pColorAttachmentFormats(pColorAttachmentFormats_), depthAttachmentFormat(depthAttachmentFormat_), stencilAttachmentFormat(stencilAttachmentFormat_) { } VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo(PipelineRenderingCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRenderingCreateInfo(VkPipelineRenderingCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRenderingCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineRenderingCreateInfo(uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachmentFormats_, VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void *pNext_ = nullptr) : pNext(pNext_) , viewMask(viewMask_) , colorAttachmentCount(static_cast(colorAttachmentFormats_.size())) , pColorAttachmentFormats(colorAttachmentFormats_.data()) , depthAttachmentFormat(depthAttachmentFormat_) , stencilAttachmentFormat(stencilAttachmentFormat_) { } # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRenderingCreateInfo &operator=(PipelineRenderingCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRenderingCreateInfo &operator=(VkPipelineRenderingCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &setViewMask(uint32_t viewMask_) VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &setColorAttachmentCount(uint32_t colorAttachmentCount_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPColorAttachmentFormats(const VULKAN_HPP_NAMESPACE::Format *pColorAttachmentFormats_) VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineRenderingCreateInfo &setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachmentFormats_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(colorAttachmentFormats_.size()); pColorAttachmentFormats = colorAttachmentFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo &setDepthAttachmentFormat(VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_) VULKAN_HPP_NOEXCEPT { depthAttachmentFormat = depthAttachmentFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setStencilAttachmentFormat(VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_) VULKAN_HPP_NOEXCEPT { stencilAttachmentFormat = stencilAttachmentFormat_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRenderingCreateInfo const &) const = default; #else bool operator==(PipelineRenderingCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (viewMask == rhs.viewMask) && (colorAttachmentCount == rhs.colorAttachmentCount) && (pColorAttachmentFormats == rhs.pColorAttachmentFormats) && (depthAttachmentFormat == rhs.depthAttachmentFormat) && (stencilAttachmentFormat == rhs.stencilAttachmentFormat); # endif } bool operator!=(PipelineRenderingCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo; const void *pNext = {}; uint32_t viewMask = {}; uint32_t colorAttachmentCount = {}; const VULKAN_HPP_NAMESPACE::Format *pColorAttachmentFormats = {}; VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = 
VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo) == sizeof(VkPipelineRenderingCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRenderingCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRenderingCreateInfo; }; using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; struct PipelineRepresentativeFragmentTestStateCreateInfoNV { using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), representativeFragmentTestEnable(representativeFragmentTestEnable_) { } VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(PipelineRepresentativeFragmentTestStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRepresentativeFragmentTestStateCreateInfoNV(VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineRepresentativeFragmentTestStateCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=(PipelineRepresentativeFragmentTestStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineRepresentativeFragmentTestStateCreateInfoNV &operator=(VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_) VULKAN_HPP_NOEXCEPT { representativeFragmentTestEnable = representativeFragmentTestEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, representativeFragmentTestEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineRepresentativeFragmentTestStateCreateInfoNV const &) const = default; #else bool operator==(PipelineRepresentativeFragmentTestStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(representativeFragmentTestEnable == rhs.representativeFragmentTestEnable); # endif } bool operator!=(PipelineRepresentativeFragmentTestStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV) == sizeof(VkPipelineRepresentativeFragmentTestStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; }; struct PipelineSampleLocationsStateCreateInfoEXT { using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), sampleLocationsEnable(sampleLocationsEnable_), sampleLocationsInfo(sampleLocationsInfo_) { } VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(PipelineSampleLocationsStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineSampleLocationsStateCreateInfoEXT(VkPipelineSampleLocationsStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineSampleLocationsStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineSampleLocationsStateCreateInfoEXT &operator=(PipelineSampleLocationsStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineSampleLocationsStateCreateInfoEXT &operator=(VkPipelineSampleLocationsStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_) VULKAN_HPP_NOEXCEPT { sampleLocationsEnable = sampleLocationsEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo(VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &sampleLocationsInfo_) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, 
sampleLocationsEnable, sampleLocationsInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineSampleLocationsStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineSampleLocationsStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (sampleLocationsEnable == rhs.sampleLocationsEnable) && (sampleLocationsInfo == rhs.sampleLocationsInfo); # endif } bool operator!=(PipelineSampleLocationsStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT) == sizeof(VkPipelineSampleLocationsStateCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineSampleLocationsStateCreateInfoEXT; }; struct PipelineShaderStageRequiredSubgroupSizeCreateInfo { using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo(uint32_t requiredSubgroupSize_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), requiredSubgroupSize(requiredSubgroupSize_) { } VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo(PipelineShaderStageRequiredSubgroupSizeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageRequiredSubgroupSizeCreateInfo(VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineShaderStageRequiredSubgroupSizeCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=(PipelineShaderStageRequiredSubgroupSizeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageRequiredSubgroupSizeCreateInfo &operator=(VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, requiredSubgroupSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineShaderStageRequiredSubgroupSizeCreateInfo const &) const = default; #else bool operator==(PipelineShaderStageRequiredSubgroupSizeCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # 
if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (requiredSubgroupSize == rhs.requiredSubgroupSize); # endif } bool operator!=(PipelineShaderStageRequiredSubgroupSizeCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; void *pNext = {}; uint32_t requiredSubgroupSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo) == sizeof(VkPipelineShaderStageRequiredSubgroupSizeCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo; }; using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; struct PipelineTessellationDomainOriginStateCreateInfo { using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), domainOrigin(domainOrigin_) { } VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(PipelineTessellationDomainOriginStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineTessellationDomainOriginStateCreateInfo(VkPipelineTessellationDomainOriginStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PipelineTessellationDomainOriginStateCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineTessellationDomainOriginStateCreateInfo &operator=(PipelineTessellationDomainOriginStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineTessellationDomainOriginStateCreateInfo &operator=(VkPipelineTessellationDomainOriginStateCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_) VULKAN_HPP_NOEXCEPT { domainOrigin = domainOrigin_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, domainOrigin); } #endif #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineTessellationDomainOriginStateCreateInfo const &) const = default; #else bool operator==(PipelineTessellationDomainOriginStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (domainOrigin == rhs.domainOrigin); # endif } bool operator!=(PipelineTessellationDomainOriginStateCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo) == sizeof(VkPipelineTessellationDomainOriginStateCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineTessellationDomainOriginStateCreateInfo; }; using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; struct VertexInputBindingDivisorDescriptionEXT { using NativeType = VkVertexInputBindingDivisorDescriptionEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT : binding(binding_), divisor(divisor_) { } VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(VertexInputBindingDivisorDescriptionEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDivisorDescriptionEXT(VkVertexInputBindingDivisorDescriptionEXT const &rhs) VULKAN_HPP_NOEXCEPT : VertexInputBindingDivisorDescriptionEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VertexInputBindingDivisorDescriptionEXT &operator=(VertexInputBindingDivisorDescriptionEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDivisorDescriptionEXT &operator=(VkVertexInputBindingDivisorDescriptionEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT &setDivisor(uint32_t divisor_) VULKAN_HPP_NOEXCEPT { divisor = divisor_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(binding, divisor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VertexInputBindingDivisorDescriptionEXT const &) const = default; #else bool operator==(VertexInputBindingDivisorDescriptionEXT const &rhs) const 
VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (binding == rhs.binding) && (divisor == rhs.divisor); # endif } bool operator!=(VertexInputBindingDivisorDescriptionEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t binding = {}; uint32_t divisor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT) == sizeof(VkVertexInputBindingDivisorDescriptionEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!"); struct PipelineVertexInputDivisorStateCreateInfoEXT { using NativeType = VkPipelineVertexInputDivisorStateCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT *pVertexBindingDivisors_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vertexBindingDivisorCount(vertexBindingDivisorCount_), pVertexBindingDivisors(pVertexBindingDivisors_) { } VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(PipelineVertexInputDivisorStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputDivisorStateCreateInfoEXT(VkPipelineVertexInputDivisorStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineVertexInputDivisorStateCreateInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vertexBindingDivisors_, const void *pNext_ = nullptr) : pNext(pNext_) , vertexBindingDivisorCount(static_cast(vertexBindingDivisors_.size())) , pVertexBindingDivisors(vertexBindingDivisors_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineVertexInputDivisorStateCreateInfoEXT &operator=(PipelineVertexInputDivisorStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputDivisorStateCreateInfoEXT &operator=(VkPipelineVertexInputDivisorStateCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount(uint32_t vertexBindingDivisorCount_) VULKAN_HPP_NOEXCEPT { vertexBindingDivisorCount = vertexBindingDivisorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors(const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT *pVertexBindingDivisors_) VULKAN_HPP_NOEXCEPT { pVertexBindingDivisors = pVertexBindingDivisors_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineVertexInputDivisorStateCreateInfoEXT &setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vertexBindingDivisors_) 
VULKAN_HPP_NOEXCEPT { vertexBindingDivisorCount = static_cast(vertexBindingDivisors_.size()); pVertexBindingDivisors = vertexBindingDivisors_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineVertexInputDivisorStateCreateInfoEXT const &) const = default; #else bool operator==(PipelineVertexInputDivisorStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vertexBindingDivisorCount == rhs.vertexBindingDivisorCount) && (pVertexBindingDivisors == rhs.pVertexBindingDivisors); # endif } bool operator!=(PipelineVertexInputDivisorStateCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; const void *pNext = {}; uint32_t vertexBindingDivisorCount = {}; const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT *pVertexBindingDivisors = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT) == sizeof(VkPipelineVertexInputDivisorStateCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineVertexInputDivisorStateCreateInfoEXT; }; struct PipelineViewportCoarseSampleOrderStateCreateInfoNV { using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV *pCustomSampleOrders_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), sampleOrderType(sampleOrderType_), customSampleOrderCount(customSampleOrderCount_), pCustomSampleOrders(pCustomSampleOrders_) { } VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(PipelineViewportCoarseSampleOrderStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportCoarseSampleOrderStateCreateInfoNV(VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportCoarseSampleOrderStateCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) 
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &customSampleOrders_, const void *pNext_ = nullptr) : pNext(pNext_) , sampleOrderType(sampleOrderType_) , customSampleOrderCount(static_cast(customSampleOrders_.size())) , pCustomSampleOrders(customSampleOrders_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=(PipelineViewportCoarseSampleOrderStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportCoarseSampleOrderStateCreateInfoNV &operator=(VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_) VULKAN_HPP_NOEXCEPT { sampleOrderType = sampleOrderType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount(uint32_t customSampleOrderCount_) VULKAN_HPP_NOEXCEPT { customSampleOrderCount = customSampleOrderCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders(const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV *pCustomSampleOrders_) VULKAN_HPP_NOEXCEPT { pCustomSampleOrders = pCustomSampleOrders_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportCoarseSampleOrderStateCreateInfoNV &setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &customSampleOrders_) VULKAN_HPP_NOEXCEPT { customSampleOrderCount = static_cast(customSampleOrders_.size()); pCustomSampleOrders = customSampleOrders_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportCoarseSampleOrderStateCreateInfoNV const &) const = default; #else bool operator==(PipelineViewportCoarseSampleOrderStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (sampleOrderType == rhs.sampleOrderType) && (customSampleOrderCount == rhs.customSampleOrderCount) && (pCustomSampleOrders == rhs.pCustomSampleOrders); # endif } bool operator!=(PipelineViewportCoarseSampleOrderStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; const void *pNext = {}; 
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; uint32_t customSampleOrderCount = {}; const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV *pCustomSampleOrders = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV) == sizeof(VkPipelineViewportCoarseSampleOrderStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; }; struct PipelineViewportDepthClipControlCreateInfoEXT { using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), negativeOneToOne(negativeOneToOne_) { } VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT(PipelineViewportDepthClipControlCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportDepthClipControlCreateInfoEXT(VkPipelineViewportDepthClipControlCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportDepthClipControlCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportDepthClipControlCreateInfoEXT &operator=(PipelineViewportDepthClipControlCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportDepthClipControlCreateInfoEXT &operator=(VkPipelineViewportDepthClipControlCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setNegativeOneToOne(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_) VULKAN_HPP_NOEXCEPT { negativeOneToOne = negativeOneToOne_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, negativeOneToOne); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportDepthClipControlCreateInfoEXT const &) const = default; #else bool operator==(PipelineViewportDepthClipControlCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (negativeOneToOne == rhs.negativeOneToOne); # endif } bool operator!=(PipelineViewportDepthClipControlCreateInfoEXT const &rhs) const 
VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT) == sizeof(VkPipelineViewportDepthClipControlCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportDepthClipControlCreateInfoEXT; }; struct PipelineViewportExclusiveScissorStateCreateInfoNV { using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D *pExclusiveScissors_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), exclusiveScissorCount(exclusiveScissorCount_), pExclusiveScissors(pExclusiveScissors_) { } VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(PipelineViewportExclusiveScissorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportExclusiveScissorStateCreateInfoNV(VkPipelineViewportExclusiveScissorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportExclusiveScissorStateCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &exclusiveScissors_, const void *pNext_ = nullptr) : pNext(pNext_) , exclusiveScissorCount(static_cast(exclusiveScissors_.size())) , pExclusiveScissors(exclusiveScissors_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportExclusiveScissorStateCreateInfoNV & operator=(PipelineViewportExclusiveScissorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportExclusiveScissorStateCreateInfoNV &operator=(VkPipelineViewportExclusiveScissorStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV &setExclusiveScissorCount(uint32_t exclusiveScissorCount_) VULKAN_HPP_NOEXCEPT { exclusiveScissorCount = exclusiveScissorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors(const VULKAN_HPP_NAMESPACE::Rect2D *pExclusiveScissors_) VULKAN_HPP_NOEXCEPT { pExclusiveScissors = pExclusiveScissors_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &exclusiveScissors_) VULKAN_HPP_NOEXCEPT { exclusiveScissorCount = 
static_cast(exclusiveScissors_.size()); pExclusiveScissors = exclusiveScissors_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, exclusiveScissorCount, pExclusiveScissors); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportExclusiveScissorStateCreateInfoNV const &) const = default; #else bool operator==(PipelineViewportExclusiveScissorStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (exclusiveScissorCount == rhs.exclusiveScissorCount) && (pExclusiveScissors == rhs.pExclusiveScissors); # endif } bool operator!=(PipelineViewportExclusiveScissorStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; const void *pNext = {}; uint32_t exclusiveScissorCount = {}; const VULKAN_HPP_NAMESPACE::Rect2D *pExclusiveScissors = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV) == sizeof(VkPipelineViewportExclusiveScissorStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; }; struct ShadingRatePaletteNV { using NativeType = VkShadingRatePaletteNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV *pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT : shadingRatePaletteEntryCount(shadingRatePaletteEntryCount_), pShadingRatePaletteEntries(pShadingRatePaletteEntries_) { } VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(ShadingRatePaletteNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ShadingRatePaletteNV(VkShadingRatePaletteNV const &rhs) VULKAN_HPP_NOEXCEPT : ShadingRatePaletteNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &shadingRatePaletteEntries_) : shadingRatePaletteEntryCount(static_cast(shadingRatePaletteEntries_.size())) , pShadingRatePaletteEntries(shadingRatePaletteEntries_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ShadingRatePaletteNV &operator=(ShadingRatePaletteNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ShadingRatePaletteNV &operator=(VkShadingRatePaletteNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV 
&setShadingRatePaletteEntryCount(uint32_t shadingRatePaletteEntryCount_) VULKAN_HPP_NOEXCEPT { shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setPShadingRatePaletteEntries(const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV *pShadingRatePaletteEntries_) VULKAN_HPP_NOEXCEPT { pShadingRatePaletteEntries = pShadingRatePaletteEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ShadingRatePaletteNV &setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &shadingRatePaletteEntries_) VULKAN_HPP_NOEXCEPT { shadingRatePaletteEntryCount = static_cast(shadingRatePaletteEntries_.size()); pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(shadingRatePaletteEntryCount, pShadingRatePaletteEntries); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ShadingRatePaletteNV const &) const = default; #else bool operator==(ShadingRatePaletteNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount) && (pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries); # endif } bool operator!=(ShadingRatePaletteNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t shadingRatePaletteEntryCount = {}; const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV *pShadingRatePaletteEntries = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV) == sizeof(VkShadingRatePaletteNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ShadingRatePaletteNV is not nothrow_move_constructible!"); struct PipelineViewportShadingRateImageStateCreateInfoNV { using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV *pShadingRatePalettes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), shadingRateImageEnable(shadingRateImageEnable_), viewportCount(viewportCount_), pShadingRatePalettes(pShadingRatePalettes_) { } VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(PipelineViewportShadingRateImageStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportShadingRateImageStateCreateInfoNV(VkPipelineViewportShadingRateImageStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportShadingRateImageStateCreateInfoNV(*reinterpret_cast(&rhs)) { } # if 
!defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &shadingRatePalettes_, const void *pNext_ = nullptr) : pNext(pNext_) , shadingRateImageEnable(shadingRateImageEnable_) , viewportCount(static_cast(shadingRatePalettes_.size())) , pShadingRatePalettes(shadingRatePalettes_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportShadingRateImageStateCreateInfoNV & operator=(PipelineViewportShadingRateImageStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportShadingRateImageStateCreateInfoNV &operator=(VkPipelineViewportShadingRateImageStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_) VULKAN_HPP_NOEXCEPT { shadingRateImageEnable = shadingRateImageEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &setViewportCount(uint32_t viewportCount_) VULKAN_HPP_NOEXCEPT { viewportCount = viewportCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes(const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV *pShadingRatePalettes_) VULKAN_HPP_NOEXCEPT { pShadingRatePalettes = pShadingRatePalettes_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportShadingRateImageStateCreateInfoNV &setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &shadingRatePalettes_) VULKAN_HPP_NOEXCEPT { viewportCount = static_cast(shadingRatePalettes_.size()); pShadingRatePalettes = shadingRatePalettes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportShadingRateImageStateCreateInfoNV const &) const = default; #else bool operator==(PipelineViewportShadingRateImageStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (shadingRateImageEnable == rhs.shadingRateImageEnable) && (viewportCount == rhs.viewportCount) && (pShadingRatePalettes == rhs.pShadingRatePalettes); # endif } bool operator!=(PipelineViewportShadingRateImageStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; const void *pNext = {}; 
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV *pShadingRatePalettes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV) == sizeof(VkPipelineViewportShadingRateImageStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportShadingRateImageStateCreateInfoNV; }; struct ViewportSwizzleNV { using NativeType = VkViewportSwizzleNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT : x(x_), y(y_), z(z_), w(w_) { } VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(ViewportSwizzleNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ViewportSwizzleNV(VkViewportSwizzleNV const &rhs) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ViewportSwizzleNV &operator=(ViewportSwizzleNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ViewportSwizzleNV &operator=(VkViewportSwizzleNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &setX(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_) VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &setY(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_) VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &setZ(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_) VULKAN_HPP_NOEXCEPT { z = z_; return *this; } VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &setW(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_) VULKAN_HPP_NOEXCEPT { w = w_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(x, y, z, w); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ViewportSwizzleNV const &) const = default; #else bool operator==(ViewportSwizzleNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (x == rhs.x) && (y == rhs.y) && (z == rhs.z) && (w == rhs.w); # endif } bool operator!=(ViewportSwizzleNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; 
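  // Illustrative sketch (not generated code): the setters return *this, so a swizzle for
  // VK_NV_viewport_swizzle can be built by chaining; `swizzle` is a hypothetical local.
  //   vk::ViewportSwizzleNV swizzle;                         // all components default to ePositiveX
  //   swizzle.setX( vk::ViewportCoordinateSwizzleNV::ePositiveX )
  //          .setY( vk::ViewportCoordinateSwizzleNV::ePositiveY )
  //          .setZ( vk::ViewportCoordinateSwizzleNV::ePositiveZ )
  //          .setW( vk::ViewportCoordinateSwizzleNV::ePositiveW );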
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ViewportSwizzleNV) == sizeof(VkViewportSwizzleNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ViewportSwizzleNV is not nothrow_move_constructible!"); struct PipelineViewportSwizzleStateCreateInfoNV { using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *pViewportSwizzles_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), viewportCount(viewportCount_), pViewportSwizzles(pViewportSwizzles_) { } VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(PipelineViewportSwizzleStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportSwizzleStateCreateInfoNV(VkPipelineViewportSwizzleStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportSwizzleStateCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewportSwizzles_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , viewportCount(static_cast(viewportSwizzles_.size())) , pViewportSwizzles(viewportSwizzles_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportSwizzleStateCreateInfoNV &operator=(PipelineViewportSwizzleStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportSwizzleStateCreateInfoNV &operator=(VkPipelineViewportSwizzleStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setFlags(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &setViewportCount(uint32_t viewportCount_) VULKAN_HPP_NOEXCEPT { viewportCount = viewportCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles(const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *pViewportSwizzles_) VULKAN_HPP_NOEXCEPT { pViewportSwizzles = pViewportSwizzles_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportSwizzleStateCreateInfoNV &setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const &viewportSwizzles_) VULKAN_HPP_NOEXCEPT { viewportCount = static_cast(viewportSwizzles_.size()); pViewportSwizzles = viewportSwizzles_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, viewportCount, pViewportSwizzles); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportSwizzleStateCreateInfoNV const &) const = default; #else bool operator==(PipelineViewportSwizzleStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (viewportCount == rhs.viewportCount) && (pViewportSwizzles == rhs.pViewportSwizzles); # endif } bool operator!=(PipelineViewportSwizzleStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *pViewportSwizzles = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV) == sizeof(VkPipelineViewportSwizzleStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportSwizzleStateCreateInfoNV; }; struct ViewportWScalingNV { using NativeType = VkViewportWScalingNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT : xcoeff(xcoeff_), ycoeff(ycoeff_) { } VULKAN_HPP_CONSTEXPR ViewportWScalingNV(ViewportWScalingNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ViewportWScalingNV(VkViewportWScalingNV const &rhs) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ViewportWScalingNV &operator=(ViewportWScalingNV const &rhs) VULKAN_HPP_NOEXCEPT = default; ViewportWScalingNV &operator=(VkViewportWScalingNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV &setXcoeff(float xcoeff_) VULKAN_HPP_NOEXCEPT { xcoeff = xcoeff_; return *this; } VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV &setYcoeff(float ycoeff_) VULKAN_HPP_NOEXCEPT { ycoeff = ycoeff_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= 
VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(xcoeff, ycoeff); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ViewportWScalingNV const &) const = default; #else bool operator==(ViewportWScalingNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (xcoeff == rhs.xcoeff) && (ycoeff == rhs.ycoeff); # endif } bool operator!=(ViewportWScalingNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: float xcoeff = {}; float ycoeff = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ViewportWScalingNV) == sizeof(VkViewportWScalingNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ViewportWScalingNV is not nothrow_move_constructible!"); struct PipelineViewportWScalingStateCreateInfoNV { using NativeType = VkPipelineViewportWScalingStateCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV *pViewportWScalings_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), viewportWScalingEnable(viewportWScalingEnable_), viewportCount(viewportCount_), pViewportWScalings(pViewportWScalings_) { } VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(PipelineViewportWScalingStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportWScalingStateCreateInfoNV(VkPipelineViewportWScalingStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : PipelineViewportWScalingStateCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewportWScalings_, const void *pNext_ = nullptr) : pNext(pNext_) , viewportWScalingEnable(viewportWScalingEnable_) , viewportCount(static_cast(viewportWScalings_.size())) , pViewportWScalings(viewportWScalings_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PipelineViewportWScalingStateCreateInfoNV &operator=(PipelineViewportWScalingStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; PipelineViewportWScalingStateCreateInfoNV &operator=(VkPipelineViewportWScalingStateCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_) VULKAN_HPP_NOEXCEPT { viewportWScalingEnable = viewportWScalingEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &setViewportCount(uint32_t viewportCount_) VULKAN_HPP_NOEXCEPT { viewportCount = viewportCount_; return *this; } 
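  // Illustrative sketch (not generated code): per-viewport W scaling for VK_NV_clip_space_w_scaling;
  // `wScalings` and `wScalingState` are hypothetical locals, and the array must outlive the struct.
  //   std::array<vk::ViewportWScalingNV, 2> wScalings = { vk::ViewportWScalingNV( 1.0f, 1.0f ),
  //                                                       vk::ViewportWScalingNV( 0.5f, 0.5f ) };
  //   auto wScalingState = vk::PipelineViewportWScalingStateCreateInfoNV()
  //                          .setViewportWScalingEnable( VK_TRUE )
  //                          .setViewportWScalings( wScalings );  // enhanced-mode setter also updates viewportCount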
VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings(const VULKAN_HPP_NAMESPACE::ViewportWScalingNV *pViewportWScalings_) VULKAN_HPP_NOEXCEPT { pViewportWScalings = pViewportWScalings_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PipelineViewportWScalingStateCreateInfoNV &setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewportWScalings_) VULKAN_HPP_NOEXCEPT { viewportCount = static_cast(viewportWScalings_.size()); pViewportWScalings = viewportWScalings_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PipelineViewportWScalingStateCreateInfoNV const &) const = default; #else bool operator==(PipelineViewportWScalingStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (viewportWScalingEnable == rhs.viewportWScalingEnable) && (viewportCount == rhs.viewportCount) && (pViewportWScalings == rhs.pViewportWScalings); # endif } bool operator!=(PipelineViewportWScalingStateCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; uint32_t viewportCount = {}; const VULKAN_HPP_NAMESPACE::ViewportWScalingNV *pViewportWScalings = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV) == sizeof(VkPipelineViewportWScalingStateCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = PipelineViewportWScalingStateCreateInfoNV; }; #if defined(VK_USE_PLATFORM_GGP) struct PresentFrameTokenGGP { using NativeType = VkPresentFrameTokenGGP; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), frameToken(frameToken_) { } VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(PresentFrameTokenGGP const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentFrameTokenGGP(VkPresentFrameTokenGGP const &rhs) VULKAN_HPP_NOEXCEPT : PresentFrameTokenGGP(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentFrameTokenGGP &operator=(PresentFrameTokenGGP const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentFrameTokenGGP &operator=(VkPresentFrameTokenGGP const 
&rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP &setFrameToken(GgpFrameToken frameToken_) VULKAN_HPP_NOEXCEPT { frameToken = frameToken_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, frameToken); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(PresentFrameTokenGGP const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&frameToken, &rhs.frameToken, sizeof(GgpFrameToken)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(PresentFrameTokenGGP const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&frameToken, &rhs.frameToken, sizeof(GgpFrameToken)) == 0); } bool operator!=(PresentFrameTokenGGP const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; const void *pNext = {}; GgpFrameToken frameToken = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP) == sizeof(VkPresentFrameTokenGGP), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PresentFrameTokenGGP is not nothrow_move_constructible!"); template<> struct CppType { using Type = PresentFrameTokenGGP; }; #endif /*VK_USE_PLATFORM_GGP*/ struct PresentIdKHR { using NativeType = VkPresentIdKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentIdKHR(uint32_t swapchainCount_ = {}, const uint64_t *pPresentIds_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchainCount(swapchainCount_), pPresentIds(pPresentIds_) { } VULKAN_HPP_CONSTEXPR PresentIdKHR(PresentIdKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentIdKHR(VkPresentIdKHR const &rhs) VULKAN_HPP_NOEXCEPT : PresentIdKHR(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentIdKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &presentIds_, const void *pNext_ = nullptr) : pNext(pNext_) , swapchainCount(static_cast(presentIds_.size())) , pPresentIds(presentIds_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentIdKHR &operator=(PresentIdKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentIdKHR &operator=(VkPresentIdKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 
PresentIdKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentIdKHR &setSwapchainCount(uint32_t swapchainCount_) VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentIdKHR &setPPresentIds(const uint64_t *pPresentIds_) VULKAN_HPP_NOEXCEPT { pPresentIds = pPresentIds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentIdKHR &setPresentIds(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &presentIds_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast(presentIds_.size()); pPresentIds = presentIds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, swapchainCount, pPresentIds); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PresentIdKHR const &) const = default; #else bool operator==(PresentIdKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchainCount == rhs.swapchainCount) && (pPresentIds == rhs.pPresentIds); # endif } bool operator!=(PresentIdKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR; const void *pNext = {}; uint32_t swapchainCount = {}; const uint64_t *pPresentIds = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentIdKHR) == sizeof(VkPresentIdKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PresentIdKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PresentIdKHR; }; struct PresentInfoKHR { using NativeType = VkPresentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains_ = {}, const uint32_t *pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result *pResults_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), waitSemaphoreCount(waitSemaphoreCount_), pWaitSemaphores(pWaitSemaphores_), swapchainCount(swapchainCount_), pSwapchains(pSwapchains_), pImageIndices(pImageIndices_), pResults(pResults_) { } VULKAN_HPP_CONSTEXPR PresentInfoKHR(PresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentInfoKHR(VkPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : PresentInfoKHR(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentInfoKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageIndices_ = {}, 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &results_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , waitSemaphoreCount(static_cast(waitSemaphores_.size())) , pWaitSemaphores(waitSemaphores_.data()) , swapchainCount(static_cast(swapchains_.size())) , pSwapchains(swapchains_.data()) , pImageIndices(imageIndices_.data()) , pResults(results_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(swapchains_.size() == imageIndices_.size()); VULKAN_HPP_ASSERT(results_.empty() || (swapchains_.size() == results_.size())); VULKAN_HPP_ASSERT(results_.empty() || (imageIndices_.size() == results_.size())); # else if(swapchains_.size() != imageIndices_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()"); } if(!results_.empty() && (swapchains_.size() != results_.size())) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )"); } if(!results_.empty() && (imageIndices_.size() != results_.size())) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentInfoKHR &operator=(PresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentInfoKHR &operator=(VkPresentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setWaitSemaphoreCount(uint32_t waitSemaphoreCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setPWaitSemaphores(const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores_) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentInfoKHR & setWaitSemaphores(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphores_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast(waitSemaphores_.size()); pWaitSemaphores = waitSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setSwapchainCount(uint32_t swapchainCount_) VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setPSwapchains(const VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains_) VULKAN_HPP_NOEXCEPT { pSwapchains = pSwapchains_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentInfoKHR & setSwapchains(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &swapchains_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast(swapchains_.size()); pSwapchains = swapchains_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setPImageIndices(const uint32_t *pImageIndices_) VULKAN_HPP_NOEXCEPT { pImageIndices = pImageIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentInfoKHR &setImageIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageIndices_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast(imageIndices_.size()); pImageIndices = imageIndices_.data(); return *this; } # endif 
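  // Illustrative sketch (not generated code): a minimal present call, assuming an existing
  // vk::Queue `queue`, vk::Semaphore `renderFinished`, vk::SwapchainKHR `swapchain` and uint32_t `imageIndex`.
  //   vk::PresentInfoKHR presentInfo( renderFinished, swapchain, imageIndex );  // enhanced-mode ArrayProxy constructor
  //   vk::Result result = queue.presentKHR( presentInfo );                      // may return eSuboptimalKHR
  // As enforced above, the swapchain, image-index and optional result arrays must have matching sizes.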
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR &setPResults(VULKAN_HPP_NAMESPACE::Result *pResults_) VULKAN_HPP_NOEXCEPT { pResults = pResults_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentInfoKHR &setResults(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &results_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast(results_.size()); pResults = results_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PresentInfoKHR const &) const = default; #else bool operator==(PresentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (waitSemaphoreCount == rhs.waitSemaphoreCount) && (pWaitSemaphores == rhs.pWaitSemaphores) && (swapchainCount == rhs.swapchainCount) && (pSwapchains == rhs.pSwapchains) && (pImageIndices == rhs.pImageIndices) && (pResults == rhs.pResults); # endif } bool operator!=(PresentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; const void *pNext = {}; uint32_t waitSemaphoreCount = {}; const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores = {}; uint32_t swapchainCount = {}; const VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains = {}; const uint32_t *pImageIndices = {}; VULKAN_HPP_NAMESPACE::Result *pResults = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentInfoKHR) == sizeof(VkPresentInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PresentInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PresentInfoKHR; }; struct RectLayerKHR { using NativeType = VkRectLayerKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT : offset(offset_), extent(extent_), layer(layer_) { } VULKAN_HPP_CONSTEXPR RectLayerKHR(RectLayerKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RectLayerKHR(VkRectLayerKHR const &rhs) VULKAN_HPP_NOEXCEPT : RectLayerKHR(*reinterpret_cast(&rhs)) {} explicit RectLayerKHR(Rect2D const &rect2D, uint32_t layer_ = {}) : offset(rect2D.offset) , extent(rect2D.extent) , layer(layer_) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RectLayerKHR &operator=(RectLayerKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RectLayerKHR &operator=(VkRectLayerKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RectLayerKHR &setOffset(VULKAN_HPP_NAMESPACE::Offset2D const &offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 RectLayerKHR &setExtent(VULKAN_HPP_NAMESPACE::Extent2D const &extent_) VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 RectLayerKHR &setLayer(uint32_t layer_) VULKAN_HPP_NOEXCEPT { layer = layer_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(offset, extent, layer); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RectLayerKHR const &) const = default; #else bool operator==(RectLayerKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (offset == rhs.offset) && (extent == rhs.extent) && (layer == rhs.layer); # endif } bool operator!=(RectLayerKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::Offset2D offset = {}; VULKAN_HPP_NAMESPACE::Extent2D extent = {}; uint32_t layer = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RectLayerKHR) == sizeof(VkRectLayerKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RectLayerKHR is not nothrow_move_constructible!"); struct PresentRegionKHR { using NativeType = VkPresentRegionKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR *pRectangles_ = {}) VULKAN_HPP_NOEXCEPT : rectangleCount(rectangleCount_), pRectangles(pRectangles_) { } VULKAN_HPP_CONSTEXPR PresentRegionKHR(PresentRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentRegionKHR(VkPresentRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT : PresentRegionKHR(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentRegionKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &rectangles_) : rectangleCount(static_cast(rectangles_.size())) , pRectangles(rectangles_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentRegionKHR &operator=(PresentRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentRegionKHR &operator=(VkPresentRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR &setRectangleCount(uint32_t rectangleCount_) VULKAN_HPP_NOEXCEPT { rectangleCount = rectangleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR &setPRectangles(const VULKAN_HPP_NAMESPACE::RectLayerKHR *pRectangles_) VULKAN_HPP_NOEXCEPT { pRectangles = pRectangles_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentRegionKHR & setRectangles(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &rectangles_) VULKAN_HPP_NOEXCEPT { rectangleCount = static_cast(rectangles_.size()); pRectangles = rectangles_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast<const VkPresentRegionKHR *>(this); } explicit operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPresentRegionKHR *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::RectLayerKHR * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(rectangleCount, pRectangles); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PresentRegionKHR const &) const = default; #else bool operator==(PresentRegionKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (rectangleCount == rhs.rectangleCount) && (pRectangles == rhs.pRectangles); # endif } bool operator!=(PresentRegionKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t rectangleCount = {}; const VULKAN_HPP_NAMESPACE::RectLayerKHR *pRectangles = {}; };
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentRegionKHR) == sizeof(VkPresentRegionKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "PresentRegionKHR is not nothrow_move_constructible!");
struct PresentRegionsKHR { using NativeType = VkPresentRegionsKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR *pRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchainCount(swapchainCount_), pRegions(pRegions_) { } VULKAN_HPP_CONSTEXPR PresentRegionsKHR(PresentRegionsKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentRegionsKHR(VkPresentRegionsKHR const &rhs) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR(*reinterpret_cast<PresentRegionsKHR const *>(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentRegionsKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const &regions_, const void *pNext_ = nullptr) : pNext(pNext_) , swapchainCount(static_cast<uint32_t>(regions_.size())) , pRegions(regions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentRegionsKHR &operator=(PresentRegionsKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentRegionsKHR &operator=(VkPresentRegionsKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR &setSwapchainCount(uint32_t swapchainCount_) VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR &setPRegions(const VULKAN_HPP_NAMESPACE::PresentRegionKHR *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentRegionsKHR & setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const &regions_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast<uint32_t>(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPresentRegionsKHR *>(this); } explicit operator VkPresentRegionsKHR &()
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, swapchainCount, pRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PresentRegionsKHR const &) const = default; #else bool operator==(PresentRegionsKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchainCount == rhs.swapchainCount) && (pRegions == rhs.pRegions); # endif } bool operator!=(PresentRegionsKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; const void *pNext = {}; uint32_t swapchainCount = {}; const VULKAN_HPP_NAMESPACE::PresentRegionKHR *pRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentRegionsKHR) == sizeof(VkPresentRegionsKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PresentRegionsKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = PresentRegionsKHR; }; struct PresentTimeGOOGLE { using NativeType = VkPresentTimeGOOGLE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT : presentID(presentID_), desiredPresentTime(desiredPresentTime_) { } VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(PresentTimeGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentTimeGOOGLE(VkPresentTimeGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentTimeGOOGLE &operator=(PresentTimeGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentTimeGOOGLE &operator=(VkPresentTimeGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE &setPresentID(uint32_t presentID_) VULKAN_HPP_NOEXCEPT { presentID = presentID_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE &setDesiredPresentTime(uint64_t desiredPresentTime_) VULKAN_HPP_NOEXCEPT { desiredPresentTime = desiredPresentTime_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(presentID, desiredPresentTime); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PresentTimeGOOGLE const &) const = default; #else bool operator==(PresentTimeGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (presentID == rhs.presentID) && (desiredPresentTime == rhs.desiredPresentTime); # endif } bool operator!=(PresentTimeGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t presentID = {}; uint64_t desiredPresentTime = 
{}; };
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE) == sizeof(VkPresentTimeGOOGLE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "PresentTimeGOOGLE is not nothrow_move_constructible!");
struct PresentTimesInfoGOOGLE { using NativeType = VkPresentTimesInfoGOOGLE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE *pTimes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), swapchainCount(swapchainCount_), pTimes(pTimes_) { } VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(PresentTimesInfoGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentTimesInfoGOOGLE(VkPresentTimesInfoGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT : PresentTimesInfoGOOGLE(*reinterpret_cast<PresentTimesInfoGOOGLE const *>(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentTimesInfoGOOGLE(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const &times_, const void *pNext_ = nullptr) : pNext(pNext_) , swapchainCount(static_cast<uint32_t>(times_.size())) , pTimes(times_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PresentTimesInfoGOOGLE &operator=(PresentTimesInfoGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; PresentTimesInfoGOOGLE &operator=(VkPresentTimesInfoGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE &setSwapchainCount(uint32_t swapchainCount_) VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE &setPTimes(const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE *pTimes_) VULKAN_HPP_NOEXCEPT { pTimes = pTimes_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) PresentTimesInfoGOOGLE & setTimes(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const &times_) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast<uint32_t>(times_.size()); pTimes = times_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPresentTimesInfoGOOGLE *>(this); } explicit operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPresentTimesInfoGOOGLE *>(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, swapchainCount, pTimes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PresentTimesInfoGOOGLE const &) const = default; #else bool operator==(PresentTimesInfoGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (swapchainCount == rhs.swapchainCount) && (pTimes == rhs.pTimes); # endif } bool operator!=(PresentTimesInfoGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public:
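  // Illustrative sketch (not generated code): VK_GOOGLE_display_timing chains this struct into a
  // vk::PresentInfoKHR; `presentInfo` and `targetTimeNs` are hypothetical, and swapchainCount must
  // match PresentInfoKHR::swapchainCount.
  //   vk::PresentTimeGOOGLE presentTime( /*presentID=*/42, /*desiredPresentTime=*/targetTimeNs );
  //   vk::PresentTimesInfoGOOGLE timesInfo( presentTime );   // ArrayProxy constructor sets swapchainCount = 1
  //   presentInfo.pNext = &timesInfo;                        // chain before queue.presentKHR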
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; const void *pNext = {}; uint32_t swapchainCount = {}; const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE *pTimes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE) == sizeof(VkPresentTimesInfoGOOGLE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PresentTimesInfoGOOGLE is not nothrow_move_constructible!"); template<> struct CppType { using Type = PresentTimesInfoGOOGLE; }; struct PrivateDataSlotCreateInfo { using NativeType = VkPrivateDataSlotCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo(PrivateDataSlotCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PrivateDataSlotCreateInfo(VkPrivateDataSlotCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : PrivateDataSlotCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ PrivateDataSlotCreateInfo &operator=(PrivateDataSlotCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; PrivateDataSlotCreateInfo &operator=(VkPrivateDataSlotCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(PrivateDataSlotCreateInfo const &) const = default; #else bool operator==(PrivateDataSlotCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(PrivateDataSlotCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo) == sizeof(VkPrivateDataSlotCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "PrivateDataSlotCreateInfo is not 
nothrow_move_constructible!"); template<> struct CppType { using Type = PrivateDataSlotCreateInfo; }; using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; struct ProtectedSubmitInfo { using NativeType = VkProtectedSubmitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), protectedSubmit(protectedSubmit_) { } VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(ProtectedSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ProtectedSubmitInfo(VkProtectedSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ProtectedSubmitInfo &operator=(ProtectedSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ProtectedSubmitInfo &operator=(VkProtectedSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo &setProtectedSubmit(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_) VULKAN_HPP_NOEXCEPT { protectedSubmit = protectedSubmit_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, protectedSubmit); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ProtectedSubmitInfo const &) const = default; #else bool operator==(ProtectedSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (protectedSubmit == rhs.protectedSubmit); # endif } bool operator!=(ProtectedSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo) == sizeof(VkProtectedSubmitInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ProtectedSubmitInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ProtectedSubmitInfo; }; struct QueryPoolCreateInfo { using NativeType = VkQueryPoolCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, 
VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), queryType(queryType_), queryCount(queryCount_), pipelineStatistics(pipelineStatistics_) { } VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(QueryPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; QueryPoolCreateInfo(VkQueryPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ QueryPoolCreateInfo &operator=(QueryPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; QueryPoolCreateInfo &operator=(VkQueryPoolCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &setQueryType(VULKAN_HPP_NAMESPACE::QueryType queryType_) VULKAN_HPP_NOEXCEPT { queryType = queryType_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &setQueryCount(uint32_t queryCount_) VULKAN_HPP_NOEXCEPT { queryCount = queryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPipelineStatistics(VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_) VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, queryType, queryCount, pipelineStatistics); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(QueryPoolCreateInfo const &) const = default; #else bool operator==(QueryPoolCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (queryType == rhs.queryType) && (queryCount == rhs.queryCount) && (pipelineStatistics == rhs.pipelineStatistics); # endif } bool operator!=(QueryPoolCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; uint32_t queryCount = {}; VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo) == sizeof(VkQueryPoolCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "QueryPoolCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = QueryPoolCreateInfo; }; struct QueryPoolPerformanceCreateInfoKHR { using NativeType = 
VkQueryPoolPerformanceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t *pCounterIndices_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), queueFamilyIndex(queueFamilyIndex_), counterIndexCount(counterIndexCount_), pCounterIndices(pCounterIndices_) { } VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(QueryPoolPerformanceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; QueryPoolPerformanceCreateInfoKHR(VkQueryPoolPerformanceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : QueryPoolPerformanceCreateInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &counterIndices_, const void *pNext_ = nullptr) : pNext(pNext_) , queueFamilyIndex(queueFamilyIndex_) , counterIndexCount(static_cast(counterIndices_.size())) , pCounterIndices(counterIndices_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ QueryPoolPerformanceCreateInfoKHR &operator=(QueryPoolPerformanceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; QueryPoolPerformanceCreateInfoKHR &operator=(VkQueryPoolPerformanceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &setQueueFamilyIndex(uint32_t queueFamilyIndex_) VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &setCounterIndexCount(uint32_t counterIndexCount_) VULKAN_HPP_NOEXCEPT { counterIndexCount = counterIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &setPCounterIndices(const uint32_t *pCounterIndices_) VULKAN_HPP_NOEXCEPT { pCounterIndices = pCounterIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) QueryPoolPerformanceCreateInfoKHR & setCounterIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &counterIndices_) VULKAN_HPP_NOEXCEPT { counterIndexCount = static_cast(counterIndices_.size()); pCounterIndices = counterIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(QueryPoolPerformanceCreateInfoKHR const &) const = default; #else bool operator==(QueryPoolPerformanceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return 
(sType == rhs.sType) && (pNext == rhs.pNext) && (queueFamilyIndex == rhs.queueFamilyIndex) && (counterIndexCount == rhs.counterIndexCount) && (pCounterIndices == rhs.pCounterIndices); # endif } bool operator!=(QueryPoolPerformanceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; const void *pNext = {}; uint32_t queueFamilyIndex = {}; uint32_t counterIndexCount = {}; const uint32_t *pCounterIndices = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR) == sizeof(VkQueryPoolPerformanceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = QueryPoolPerformanceCreateInfoKHR; }; struct QueryPoolPerformanceQueryCreateInfoINTEL { using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), performanceCountersSampling(performanceCountersSampling_) { } VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(QueryPoolPerformanceQueryCreateInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; QueryPoolPerformanceQueryCreateInfoINTEL(VkQueryPoolPerformanceQueryCreateInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT : QueryPoolPerformanceQueryCreateInfoINTEL(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ QueryPoolPerformanceQueryCreateInfoINTEL &operator=(QueryPoolPerformanceQueryCreateInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT = default; QueryPoolPerformanceQueryCreateInfoINTEL &operator=(VkQueryPoolPerformanceQueryCreateInfoINTEL const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_) VULKAN_HPP_NOEXCEPT { performanceCountersSampling = performanceCountersSampling_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, performanceCountersSampling); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(QueryPoolPerformanceQueryCreateInfoINTEL const &) const = default; #else bool 
operator==(QueryPoolPerformanceQueryCreateInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (performanceCountersSampling == rhs.performanceCountersSampling);
#  endif
    }

    bool operator!=(QueryPoolPerformanceQueryCreateInfoINTEL const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL) == sizeof(VkQueryPoolPerformanceQueryCreateInfoINTEL),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
                           "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
  {
    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
  };

  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;

  struct QueueFamilyCheckpointProperties2NV
  {
    using NativeType = VkQueueFamilyCheckpointProperties2NV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {},
                                                            void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), checkpointExecutionStageMask(checkpointExecutionStageMask_)
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(QueueFamilyCheckpointProperties2NV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointProperties2NV(VkQueueFamilyCheckpointProperties2NV const &rhs) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointProperties2NV(*reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    QueueFamilyCheckpointProperties2NV &operator=(QueueFamilyCheckpointProperties2NV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointProperties2NV &operator=(VkQueueFamilyCheckpointProperties2NV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>(&rhs);
      return *this;
    }

    explicit operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV *>(this);
    }

    explicit operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, checkpointExecutionStageMask);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(QueueFamilyCheckpointProperties2NV const &) const = default;
#else
    bool operator==(QueueFamilyCheckpointProperties2NV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (checkpointExecutionStageMask == rhs.checkpointExecutionStageMask);
#  endif
    }

    bool operator!=(QueueFamilyCheckpointProperties2NV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
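    // Illustrative sketch (comment only; variable names are examples): QueueFamilyCheckpointProperties2NV
    // is a read-only output structure. It is typically chained behind QueueFamilyProperties2 via pNext so
    // that vkGetPhysicalDeviceQueueFamilyProperties2 / PhysicalDevice::getQueueFamilyProperties2 can report
    // which pipeline stages support checkpoint markers (VK_NV_device_diagnostic_checkpoints together with
    // synchronization2-style stage flags):
    //
    //   vk::QueueFamilyCheckpointProperties2NV checkpointProps;  // filled in by the implementation
    //   vk::QueueFamilyProperties2             props2;
    //   props2.pNext = &checkpointProps;
    //   // then query the physical device's queue families with an array of such QueueFamilyProperties2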
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV) == sizeof(VkQueueFamilyCheckpointProperties2NV),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
                           "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
  {
    using Type = QueueFamilyCheckpointProperties2NV;
  };

  struct QueueFamilyCheckpointPropertiesNV
  {
    using NativeType = VkQueueFamilyCheckpointPropertiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {},
                                                           void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), checkpointExecutionStageMask(checkpointExecutionStageMask_)
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(QueueFamilyCheckpointPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointPropertiesNV(VkQueueFamilyCheckpointPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointPropertiesNV(*reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    QueueFamilyCheckpointPropertiesNV &operator=(QueueFamilyCheckpointPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointPropertiesNV &operator=(VkQueueFamilyCheckpointPropertiesNV const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>(&rhs);
      return *this;
    }

    explicit operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>(this);
    }

    explicit operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, checkpointExecutionStageMask);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(QueueFamilyCheckpointPropertiesNV const &) const = default;
#else
    bool operator==(QueueFamilyCheckpointPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (checkpointExecutionStageMask == rhs.checkpointExecutionStageMask);
#  endif
    }

    bool operator!=(QueueFamilyCheckpointPropertiesNV const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV) == sizeof(VkQueueFamilyCheckpointPropertiesNV),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value,
                           "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!");

  template <>
  struct
CppType { using Type = QueueFamilyCheckpointPropertiesNV; }; struct QueueFamilyGlobalPriorityPropertiesKHR { using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR(uint32_t priorityCount_ = {}, std::array const & priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } }, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), priorityCount(priorityCount_), priorities(priorities_) { } VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR(QueueFamilyGlobalPriorityPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; QueueFamilyGlobalPriorityPropertiesKHR(VkQueueFamilyGlobalPriorityPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : QueueFamilyGlobalPriorityPropertiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ QueueFamilyGlobalPriorityPropertiesKHR &operator=(QueueFamilyGlobalPriorityPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; QueueFamilyGlobalPriorityPropertiesKHR &operator=(VkQueueFamilyGlobalPriorityPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR &setPriorityCount(uint32_t priorityCount_) VULKAN_HPP_NOEXCEPT { priorityCount = priorityCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorities(std::array priorities_) VULKAN_HPP_NOEXCEPT { priorities = priorities_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, priorityCount, priorities); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(QueueFamilyGlobalPriorityPropertiesKHR const &) const = default; #else bool operator==(QueueFamilyGlobalPriorityPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(priorityCount == rhs.priorityCount) && (priorities == rhs.priorities);
#  endif
    }

    bool operator!=(QueueFamilyGlobalPriorityPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
    void *pNext = {};
    uint32_t priorityCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR) == sizeof(VkQueueFamilyGlobalPriorityPropertiesKHR),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
                           "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityPropertiesKHR>
  {
    using Type = QueueFamilyGlobalPriorityPropertiesKHR;
  };

  using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR;

  struct QueueFamilyProperties
  {
    using NativeType = VkQueueFamilyProperties;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
                                               uint32_t queueCount_ = {},
                                               uint32_t timestampValidBits_ = {},
                                               VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
      : queueFlags(queueFlags_), queueCount(queueCount_), timestampValidBits(timestampValidBits_), minImageTransferGranularity(minImageTransferGranularity_)
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyProperties(QueueFamilyProperties const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties(VkQueueFamilyProperties const &rhs) VULKAN_HPP_NOEXCEPT
      : QueueFamilyProperties(*reinterpret_cast<QueueFamilyProperties const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    QueueFamilyProperties &operator=(QueueFamilyProperties const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties &operator=(VkQueueFamilyProperties const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>(&rhs);
      return *this;
    }

    explicit operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyProperties *>(this);
    }

    explicit operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::QueueFlags const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Extent3D const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(queueFlags, queueCount, timestampValidBits, minImageTransferGranularity);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(QueueFamilyProperties const &) const = default;
#else
    bool operator==(QueueFamilyProperties const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (queueFlags == rhs.queueFlags) && (queueCount == rhs.queueCount) && (timestampValidBits == rhs.timestampValidBits) &&
             (minImageTransferGranularity == rhs.minImageTransferGranularity);
#  endif
    }

    bool operator!=(QueueFamilyProperties const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
    uint32_t queueCount = {};
    uint32_t timestampValidBits = {};
    VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueueFamilyProperties) == sizeof(VkQueueFamilyProperties),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value,
                           "QueueFamilyProperties is not nothrow_move_constructible!");

  struct QueueFamilyProperties2
  {
    using NativeType = VkQueueFamilyProperties2;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;

#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {},
                                                void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), queueFamilyProperties(queueFamilyProperties_)
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(QueueFamilyProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties2(VkQueueFamilyProperties2 const &rhs) VULKAN_HPP_NOEXCEPT
      : QueueFamilyProperties2(*reinterpret_cast<QueueFamilyProperties2 const *>(&rhs))
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    QueueFamilyProperties2 &operator=(QueueFamilyProperties2 const &rhs) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties2 &operator=(VkQueueFamilyProperties2 const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>(&rhs);
      return *this;
    }

    explicit operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyProperties2 *>(this);
    }

    explicit operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties2 *>(this);
    }

#if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, queueFamilyProperties);
    }
#endif

#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(QueueFamilyProperties2 const &) const = default;
#else
    bool operator==(QueueFamilyProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (queueFamilyProperties == rhs.queueFamilyProperties);
#  endif
    }

    bool operator!=(QueueFamilyProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueueFamilyProperties2) == sizeof(VkQueueFamilyProperties2),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value,
                           "QueueFamilyProperties2 is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
  {
    using Type = QueueFamilyProperties2;
  };

  using QueueFamilyProperties2KHR = QueueFamilyProperties2;

#if defined(VK_ENABLE_BETA_EXTENSIONS)
  struct QueueFamilyQueryResultStatusProperties2KHR
  {
    using NativeType = VkQueueFamilyQueryResultStatusProperties2KHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusProperties2KHR;

# if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusProperties2KHR(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), supported(supported_)
    {
    }

    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusProperties2KHR(QueueFamilyQueryResultStatusProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default;
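    // Illustrative sketch (comment only; beta video extensions, variable names are examples): `supported`
    // reports whether queues of this family support result status query pools. The structure is typically
    // read by chaining it behind QueueFamilyProperties2 before querying the physical device:
    //
    //   vk::QueueFamilyQueryResultStatusProperties2KHR resultStatusProps;  // `supported` written by the implementation
    //   vk::QueueFamilyProperties2                     props2;
    //   props2.pNext = &resultStatusProps;
    //   // then query via vkGetPhysicalDeviceQueueFamilyProperties2 / PhysicalDevice::getQueueFamilyProperties2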
QueueFamilyQueryResultStatusProperties2KHR(VkQueueFamilyQueryResultStatusProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT : QueueFamilyQueryResultStatusProperties2KHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ QueueFamilyQueryResultStatusProperties2KHR &operator=(QueueFamilyQueryResultStatusProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; QueueFamilyQueryResultStatusProperties2KHR &operator=(VkQueueFamilyQueryResultStatusProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusProperties2KHR &setSupported(VULKAN_HPP_NAMESPACE::Bool32 supported_) VULKAN_HPP_NOEXCEPT { supported = supported_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkQueueFamilyQueryResultStatusProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkQueueFamilyQueryResultStatusProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, supported); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(QueueFamilyQueryResultStatusProperties2KHR const &) const = default; # else bool operator==(QueueFamilyQueryResultStatusProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (supported == rhs.supported); # endif } bool operator!=(QueueFamilyQueryResultStatusProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusProperties2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supported = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusProperties2KHR) == sizeof(VkQueueFamilyQueryResultStatusProperties2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "QueueFamilyQueryResultStatusProperties2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = QueueFamilyQueryResultStatusProperties2KHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct RayTracingShaderGroupCreateInfoKHR { using NativeType = VkRayTracingShaderGroupCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void *pShaderGroupCaptureReplayHandle_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), type(type_), generalShader(generalShader_), closestHitShader(closestHitShader_), 
anyHitShader(anyHitShader_), intersectionShader(intersectionShader_), pShaderGroupCaptureReplayHandle(pShaderGroupCaptureReplayHandle_) { } VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(RayTracingShaderGroupCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingShaderGroupCreateInfoKHR(VkRayTracingShaderGroupCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : RayTracingShaderGroupCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RayTracingShaderGroupCreateInfoKHR &operator=(RayTracingShaderGroupCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingShaderGroupCreateInfoKHR &operator=(VkRayTracingShaderGroupCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &setType(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &setGeneralShader(uint32_t generalShader_) VULKAN_HPP_NOEXCEPT { generalShader = generalShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &setClosestHitShader(uint32_t closestHitShader_) VULKAN_HPP_NOEXCEPT { closestHitShader = closestHitShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &setAnyHitShader(uint32_t anyHitShader_) VULKAN_HPP_NOEXCEPT { anyHitShader = anyHitShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &setIntersectionShader(uint32_t intersectionShader_) VULKAN_HPP_NOEXCEPT { intersectionShader = intersectionShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle(const void *pShaderGroupCaptureReplayHandle_) VULKAN_HPP_NOEXCEPT { pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RayTracingShaderGroupCreateInfoKHR const &) const = default; #else bool operator==(RayTracingShaderGroupCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (type == rhs.type) && (generalShader == rhs.generalShader) && (closestHitShader == rhs.closestHitShader) && (anyHitShader == rhs.anyHitShader) && (intersectionShader == rhs.intersectionShader) && (pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle); # endif } bool operator!=(RayTracingShaderGroupCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; const 
void *pNext = {}; VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; uint32_t generalShader = {}; uint32_t closestHitShader = {}; uint32_t anyHitShader = {}; uint32_t intersectionShader = {}; const void *pShaderGroupCaptureReplayHandle = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR) == sizeof(VkRayTracingShaderGroupCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = RayTracingShaderGroupCreateInfoKHR; }; struct RayTracingPipelineInterfaceCreateInfoKHR { using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPipelineRayPayloadSize_ = {}, uint32_t maxPipelineRayHitAttributeSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxPipelineRayPayloadSize(maxPipelineRayPayloadSize_), maxPipelineRayHitAttributeSize(maxPipelineRayHitAttributeSize_) { } VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(RayTracingPipelineInterfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingPipelineInterfaceCreateInfoKHR(VkRayTracingPipelineInterfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : RayTracingPipelineInterfaceCreateInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RayTracingPipelineInterfaceCreateInfoKHR &operator=(RayTracingPipelineInterfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingPipelineInterfaceCreateInfoKHR &operator=(VkRayTracingPipelineInterfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &setMaxPipelineRayPayloadSize(uint32_t maxPipelineRayPayloadSize_) VULKAN_HPP_NOEXCEPT { maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayHitAttributeSize(uint32_t maxPipelineRayHitAttributeSize_) VULKAN_HPP_NOEXCEPT { maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RayTracingPipelineInterfaceCreateInfoKHR const &) const = default; #else bool 
operator==(RayTracingPipelineInterfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize) && (maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize); # endif } bool operator!=(RayTracingPipelineInterfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; const void *pNext = {}; uint32_t maxPipelineRayPayloadSize = {}; uint32_t maxPipelineRayHitAttributeSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR) == sizeof(VkRayTracingPipelineInterfaceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = RayTracingPipelineInterfaceCreateInfoKHR; }; struct RayTracingPipelineCreateInfoKHR { using NativeType = VkRayTracingPipelineCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR *pGroups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR *pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), stageCount(stageCount_), pStages(pStages_), groupCount(groupCount_), pGroups(pGroups_), maxPipelineRayRecursionDepth(maxPipelineRayRecursionDepth_), pLibraryInfo(pLibraryInfo_), pLibraryInterface(pLibraryInterface_), pDynamicState(pDynamicState_), layout(layout_), basePipelineHandle(basePipelineHandle_), basePipelineIndex(basePipelineIndex_) { } VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(RayTracingPipelineCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingPipelineCreateInfoKHR(VkRayTracingPipelineCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : RayTracingPipelineCreateInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &groups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR *pLibraryInterface_ = {}, const 
VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , stageCount(static_cast(stages_.size())) , pStages(stages_.data()) , groupCount(static_cast(groups_.size())) , pGroups(groups_.data()) , maxPipelineRayRecursionDepth(maxPipelineRayRecursionDepth_) , pLibraryInfo(pLibraryInfo_) , pLibraryInterface(pLibraryInterface_) , pDynamicState(pDynamicState_) , layout(layout_) , basePipelineHandle(basePipelineHandle_) , basePipelineIndex(basePipelineIndex_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RayTracingPipelineCreateInfoKHR &operator=(RayTracingPipelineCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingPipelineCreateInfoKHR &operator=(VkRayTracingPipelineCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setStageCount(uint32_t stageCount_) VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPStages(const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RayTracingPipelineCreateInfoKHR & setStages(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_) VULKAN_HPP_NOEXCEPT { stageCount = static_cast(stages_.size()); pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setGroupCount(uint32_t groupCount_) VULKAN_HPP_NOEXCEPT { groupCount = groupCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPGroups(const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR *pGroups_) VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RayTracingPipelineCreateInfoKHR &setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &groups_) VULKAN_HPP_NOEXCEPT { groupCount = static_cast(groups_.size()); pGroups = groups_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setMaxPipelineRayRecursionDepth(uint32_t maxPipelineRayRecursionDepth_) VULKAN_HPP_NOEXCEPT { maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInfo(const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *pLibraryInfo_) VULKAN_HPP_NOEXCEPT { pLibraryInfo = pLibraryInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPLibraryInterface(const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR *pLibraryInterface_) VULKAN_HPP_NOEXCEPT { pLibraryInterface = pLibraryInterface_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPDynamicState(const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo 
*pDynamicState_) VULKAN_HPP_NOEXCEPT { pDynamicState = pDynamicState_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setLayout(VULKAN_HPP_NAMESPACE::PipelineLayout layout_) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setBasePipelineHandle(VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &setBasePipelineIndex(int32_t basePipelineIndex_) VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxPipelineRayRecursionDepth, pLibraryInfo, pLibraryInterface, pDynamicState, layout, basePipelineHandle, basePipelineIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RayTracingPipelineCreateInfoKHR const &) const = default; #else bool operator==(RayTracingPipelineCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (stageCount == rhs.stageCount) && (pStages == rhs.pStages) && (groupCount == rhs.groupCount) && (pGroups == rhs.pGroups) && (maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth) && (pLibraryInfo == rhs.pLibraryInfo) && (pLibraryInterface == rhs.pLibraryInterface) && (pDynamicState == rhs.pDynamicState) && (layout == rhs.layout) && (basePipelineHandle == rhs.basePipelineHandle) && (basePipelineIndex == rhs.basePipelineIndex); # endif } bool operator!=(RayTracingPipelineCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; uint32_t stageCount = {}; const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages = {}; uint32_t groupCount = {}; const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR *pGroups = {}; uint32_t maxPipelineRayRecursionDepth = {}; const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *pLibraryInfo = {}; const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR *pLibraryInterface = {}; const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *pDynamicState = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR) == sizeof(VkRayTracingPipelineCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = RayTracingPipelineCreateInfoKHR; }; struct 
RayTracingShaderGroupCreateInfoNV { using NativeType = VkRayTracingShaderGroupCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), type(type_), generalShader(generalShader_), closestHitShader(closestHitShader_), anyHitShader(anyHitShader_), intersectionShader(intersectionShader_) { } VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(RayTracingShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingShaderGroupCreateInfoNV(VkRayTracingShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : RayTracingShaderGroupCreateInfoNV(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RayTracingShaderGroupCreateInfoNV &operator=(RayTracingShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingShaderGroupCreateInfoNV &operator=(VkRayTracingShaderGroupCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &setType(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &setGeneralShader(uint32_t generalShader_) VULKAN_HPP_NOEXCEPT { generalShader = generalShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &setClosestHitShader(uint32_t closestHitShader_) VULKAN_HPP_NOEXCEPT { closestHitShader = closestHitShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &setAnyHitShader(uint32_t anyHitShader_) VULKAN_HPP_NOEXCEPT { anyHitShader = anyHitShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &setIntersectionShader(uint32_t intersectionShader_) VULKAN_HPP_NOEXCEPT { intersectionShader = intersectionShader_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RayTracingShaderGroupCreateInfoNV const &) const = default; #else bool operator==(RayTracingShaderGroupCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (type == rhs.type) && (generalShader == rhs.generalShader) && (closestHitShader == rhs.closestHitShader) && (anyHitShader == 
rhs.anyHitShader) && (intersectionShader == rhs.intersectionShader); # endif } bool operator!=(RayTracingShaderGroupCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; uint32_t generalShader = {}; uint32_t closestHitShader = {}; uint32_t anyHitShader = {}; uint32_t intersectionShader = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV) == sizeof(VkRayTracingShaderGroupCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = RayTracingShaderGroupCreateInfoNV; }; struct RayTracingPipelineCreateInfoNV { using NativeType = VkRayTracingPipelineCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV *pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), stageCount(stageCount_), pStages(pStages_), groupCount(groupCount_), pGroups(pGroups_), maxRecursionDepth(maxRecursionDepth_), layout(layout_), basePipelineHandle(basePipelineHandle_), basePipelineIndex(basePipelineIndex_) { } VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(RayTracingPipelineCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingPipelineCreateInfoNV(VkRayTracingPipelineCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : RayTracingPipelineCreateInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , stageCount(static_cast(stages_.size())) , pStages(stages_.data()) , groupCount(static_cast(groups_.size())) , pGroups(groups_.data()) , maxRecursionDepth(maxRecursionDepth_) , layout(layout_) , basePipelineHandle(basePipelineHandle_) , basePipelineIndex(basePipelineIndex_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RayTracingPipelineCreateInfoNV &operator=(RayTracingPipelineCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; RayTracingPipelineCreateInfoNV &operator=(VkRayTracingPipelineCreateInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this 
= *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setFlags(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setStageCount(uint32_t stageCount_) VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPStages(const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages_) VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RayTracingPipelineCreateInfoNV & setStages(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &stages_) VULKAN_HPP_NOEXCEPT { stageCount = static_cast(stages_.size()); pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setGroupCount(uint32_t groupCount_) VULKAN_HPP_NOEXCEPT { groupCount = groupCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPGroups(const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV *pGroups_) VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RayTracingPipelineCreateInfoNV &setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &groups_) VULKAN_HPP_NOEXCEPT { groupCount = static_cast(groups_.size()); pGroups = groups_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setMaxRecursionDepth(uint32_t maxRecursionDepth_) VULKAN_HPP_NOEXCEPT { maxRecursionDepth = maxRecursionDepth_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setLayout(VULKAN_HPP_NAMESPACE::PipelineLayout layout_) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setBasePipelineHandle(VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_) VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &setBasePipelineIndex(int32_t basePipelineIndex_) VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RayTracingPipelineCreateInfoNV const &) const = default; #else bool operator==(RayTracingPipelineCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (stageCount == rhs.stageCount) && (pStages == rhs.pStages) && (groupCount == rhs.groupCount) && (pGroups == rhs.pGroups) && (maxRecursionDepth == 
rhs.maxRecursionDepth) && (layout == rhs.layout) && (basePipelineHandle == rhs.basePipelineHandle) && (basePipelineIndex == rhs.basePipelineIndex); # endif } bool operator!=(RayTracingPipelineCreateInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; const void *pNext = {}; VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; uint32_t stageCount = {}; const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *pStages = {}; uint32_t groupCount = {}; const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV *pGroups = {}; uint32_t maxRecursionDepth = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV) == sizeof(VkRayTracingPipelineCreateInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = RayTracingPipelineCreateInfoNV; }; struct RefreshCycleDurationGOOGLE { using NativeType = VkRefreshCycleDurationGOOGLE; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT : refreshDuration(refreshDuration_) {} VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(RefreshCycleDurationGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; RefreshCycleDurationGOOGLE(VkRefreshCycleDurationGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT : RefreshCycleDurationGOOGLE(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RefreshCycleDurationGOOGLE &operator=(RefreshCycleDurationGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT = default; RefreshCycleDurationGOOGLE &operator=(VkRefreshCycleDurationGOOGLE const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(refreshDuration); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RefreshCycleDurationGOOGLE const &) const = default; #else bool operator==(RefreshCycleDurationGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (refreshDuration == rhs.refreshDuration); # endif } bool operator!=(RefreshCycleDurationGOOGLE const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint64_t refreshDuration = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE) == sizeof(VkRefreshCycleDurationGOOGLE), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!"); struct RenderPassAttachmentBeginInfo { using NativeType = 
VkRenderPassAttachmentBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView *pAttachments_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), attachmentCount(attachmentCount_), pAttachments(pAttachments_) { } VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(RenderPassAttachmentBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassAttachmentBeginInfo(VkRenderPassAttachmentBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassAttachmentBeginInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassAttachmentBeginInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_, const void *pNext_ = nullptr) : pNext(pNext_) , attachmentCount(static_cast(attachments_.size())) , pAttachments(attachments_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassAttachmentBeginInfo &operator=(RenderPassAttachmentBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassAttachmentBeginInfo &operator=(VkRenderPassAttachmentBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &setAttachmentCount(uint32_t attachmentCount_) VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &setPAttachments(const VULKAN_HPP_NAMESPACE::ImageView *pAttachments_) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassAttachmentBeginInfo & setAttachments(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast(attachments_.size()); pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, attachmentCount, pAttachments); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassAttachmentBeginInfo const &) const = default; #else bool operator==(RenderPassAttachmentBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (attachmentCount == rhs.attachmentCount) && (pAttachments == rhs.pAttachments); # endif } bool operator!=(RenderPassAttachmentBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; const void *pNext = {}; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::ImageView 
*pAttachments = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo) == sizeof(VkRenderPassAttachmentBeginInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassAttachmentBeginInfo; }; using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; struct RenderPassBeginInfo { using NativeType = VkRenderPassBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue *pClearValues_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), renderPass(renderPass_), framebuffer(framebuffer_), renderArea(renderArea_), clearValueCount(clearValueCount_), pClearValues(pClearValues_) { } VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(RenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassBeginInfo(VkRenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &clearValues_, const void *pNext_ = nullptr) : pNext(pNext_) , renderPass(renderPass_) , framebuffer(framebuffer_) , renderArea(renderArea_) , clearValueCount(static_cast(clearValues_.size())) , pClearValues(clearValues_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassBeginInfo &operator=(RenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassBeginInfo &operator=(VkRenderPassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &setRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass_) VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &setFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_) VULKAN_HPP_NOEXCEPT { framebuffer = framebuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &setRenderArea(VULKAN_HPP_NAMESPACE::Rect2D const &renderArea_) VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &setClearValueCount(uint32_t clearValueCount_) VULKAN_HPP_NOEXCEPT { clearValueCount = clearValueCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &setPClearValues(const VULKAN_HPP_NAMESPACE::ClearValue *pClearValues_) VULKAN_HPP_NOEXCEPT { pClearValues = pClearValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassBeginInfo & setClearValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const &clearValues_) VULKAN_HPP_NOEXCEPT { clearValueCount = static_cast(clearValues_.size()); pClearValues = clearValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassBeginInfo const &) const = default; #else bool operator==(RenderPassBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (renderPass == rhs.renderPass) && (framebuffer == rhs.framebuffer) && (renderArea == rhs.renderArea) && (clearValueCount == rhs.clearValueCount) && (pClearValues == rhs.pClearValues); # endif } bool operator!=(RenderPassBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; uint32_t clearValueCount = {}; const VULKAN_HPP_NAMESPACE::ClearValue *pClearValues = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassBeginInfo) == sizeof(VkRenderPassBeginInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassBeginInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassBeginInfo; }; struct SubpassDescription { using NativeType = VkSubpassDescription; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference *pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference *pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference *pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference *pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t *pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT : flags(flags_), pipelineBindPoint(pipelineBindPoint_), inputAttachmentCount(inputAttachmentCount_), pInputAttachments(pInputAttachments_), colorAttachmentCount(colorAttachmentCount_), pColorAttachments(pColorAttachments_), pResolveAttachments(pResolveAttachments_), pDepthStencilAttachment(pDepthStencilAttachment_), preserveAttachmentCount(preserveAttachmentCount_), pPreserveAttachments(pPreserveAttachments_) { } VULKAN_HPP_CONSTEXPR SubpassDescription(SubpassDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDescription(VkSubpassDescription const &rhs) VULKAN_HPP_NOEXCEPT : 
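  // Editorial usage sketch for RenderPassBeginInfo above (comment only): beginning a render pass using the
  // enhanced-mode ArrayProxy constructor for the clear values. Assumes the default `vk` namespace, the
  // default enhanced mode, and valid handles `renderPass`, `framebuffer`, `commandBuffer` plus an extent
  // `extent` (illustrative names):
  //
  //   std::array<vk::ClearValue, 2> clearValues{};
  //   clearValues[0].color        = vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  //   clearValues[1].depthStencil = vk::ClearDepthStencilValue( 1.0f, 0 );
  //
  //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, vk::Rect2D( vk::Offset2D( 0, 0 ), extent ), clearValues );
  //   commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
  //   // ... record draw commands ...
  //   commandBuffer.endRenderPass();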
SubpassDescription(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference *pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &preserveAttachments_ = {}) : flags(flags_) , pipelineBindPoint(pipelineBindPoint_) , inputAttachmentCount(static_cast(inputAttachments_.size())) , pInputAttachments(inputAttachments_.data()) , colorAttachmentCount(static_cast(colorAttachments_.size())) , pColorAttachments(colorAttachments_.data()) , pResolveAttachments(resolveAttachments_.data()) , pDepthStencilAttachment(pDepthStencilAttachment_) , preserveAttachmentCount(static_cast(preserveAttachments_.size())) , pPreserveAttachments(preserveAttachments_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(resolveAttachments_.empty() || (colorAttachments_.size() == resolveAttachments_.size())); # else if(!resolveAttachments_.empty() && (colorAttachments_.size() != resolveAttachments_.size())) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassDescription &operator=(SubpassDescription const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDescription &operator=(VkSubpassDescription const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassDescription &setFlags(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription &setPipelineBindPoint(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription &setInputAttachmentCount(uint32_t inputAttachmentCount_) VULKAN_HPP_NOEXCEPT { inputAttachmentCount = inputAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPInputAttachments(const VULKAN_HPP_NAMESPACE::AttachmentReference *pInputAttachments_) VULKAN_HPP_NOEXCEPT { pInputAttachments = pInputAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription &setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &inputAttachments_) VULKAN_HPP_NOEXCEPT { inputAttachmentCount = static_cast(inputAttachments_.size()); pInputAttachments = inputAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubpassDescription &setColorAttachmentCount(uint32_t colorAttachmentCount_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPColorAttachments(const VULKAN_HPP_NAMESPACE::AttachmentReference *pColorAttachments_) VULKAN_HPP_NOEXCEPT { pColorAttachments = pColorAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription &setColorAttachments( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachments_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(colorAttachments_.size()); pColorAttachments = colorAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPResolveAttachments(const VULKAN_HPP_NAMESPACE::AttachmentReference *pResolveAttachments_) VULKAN_HPP_NOEXCEPT { pResolveAttachments = pResolveAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription &setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &resolveAttachments_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(resolveAttachments_.size()); pResolveAttachments = resolveAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPDepthStencilAttachment(const VULKAN_HPP_NAMESPACE::AttachmentReference *pDepthStencilAttachment_) VULKAN_HPP_NOEXCEPT { pDepthStencilAttachment = pDepthStencilAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription &setPreserveAttachmentCount(uint32_t preserveAttachmentCount_) VULKAN_HPP_NOEXCEPT { preserveAttachmentCount = preserveAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription &setPPreserveAttachments(const uint32_t *pPreserveAttachments_) VULKAN_HPP_NOEXCEPT { pPreserveAttachments = pPreserveAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription & setPreserveAttachments(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &preserveAttachments_) VULKAN_HPP_NOEXCEPT { preserveAttachmentCount = static_cast(preserveAttachments_.size()); pPreserveAttachments = preserveAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(flags, pipelineBindPoint, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassDescription const &) const = default; #else bool operator==(SubpassDescription const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (flags == rhs.flags) && (pipelineBindPoint == rhs.pipelineBindPoint) && (inputAttachmentCount == rhs.inputAttachmentCount) && (pInputAttachments == rhs.pInputAttachments) && (colorAttachmentCount == rhs.colorAttachmentCount) && (pColorAttachments == rhs.pColorAttachments) && (pResolveAttachments == rhs.pResolveAttachments) && (pDepthStencilAttachment == rhs.pDepthStencilAttachment) && (preserveAttachmentCount == rhs.preserveAttachmentCount) && (pPreserveAttachments == rhs.pPreserveAttachments); # endif } bool operator!=(SubpassDescription const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; uint32_t 
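  // Editorial usage sketch for SubpassDescription (comment only): one graphics subpass with a single color
  // attachment and a depth/stencil attachment, built via the enhanced-mode ArrayProxy constructor; a single
  // lvalue binds as a one-element ArrayProxy. Names and attachment indices are illustrative:
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::AttachmentReference depthRef( 1, vk::ImageLayout::eDepthStencilAttachmentOptimal );
  //
  //   vk::SubpassDescription subpass( {},                                  // flags
  //                                   vk::PipelineBindPoint::eGraphics,
  //                                   {},                                  // no input attachments
  //                                   colorRef,                            // one color attachment
  //                                   {},                                  // no resolve attachments
  //                                   &depthRef );                         // depth/stencil attachment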
inputAttachmentCount = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference *pInputAttachments = {}; uint32_t colorAttachmentCount = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference *pColorAttachments = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference *pResolveAttachments = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference *pDepthStencilAttachment = {}; uint32_t preserveAttachmentCount = {}; const uint32_t *pPreserveAttachments = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassDescription) == sizeof(VkSubpassDescription), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassDescription is not nothrow_move_constructible!"); struct SubpassDependency { using NativeType = VkSubpassDependency; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT : srcSubpass(srcSubpass_), dstSubpass(dstSubpass_), srcStageMask(srcStageMask_), dstStageMask(dstStageMask_), srcAccessMask(srcAccessMask_), dstAccessMask(dstAccessMask_), dependencyFlags(dependencyFlags_) { } VULKAN_HPP_CONSTEXPR SubpassDependency(SubpassDependency const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDependency(VkSubpassDependency const &rhs) VULKAN_HPP_NOEXCEPT : SubpassDependency(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassDependency &operator=(SubpassDependency const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDependency &operator=(VkSubpassDependency const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setSrcSubpass(uint32_t srcSubpass_) VULKAN_HPP_NOEXCEPT { srcSubpass = srcSubpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setDstSubpass(uint32_t dstSubpass_) VULKAN_HPP_NOEXCEPT { dstSubpass = dstSubpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setSrcStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_) VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setDstStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_) VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency &setDependencyFlags(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_) VULKAN_HPP_NOEXCEPT { dependencyFlags = dependencyFlags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassDependency const &) const = default; #else bool operator==(SubpassDependency const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (srcSubpass == rhs.srcSubpass) && (dstSubpass == rhs.dstSubpass) && (srcStageMask == rhs.srcStageMask) && (dstStageMask == rhs.dstStageMask) && (srcAccessMask == rhs.srcAccessMask) && (dstAccessMask == rhs.dstAccessMask) && (dependencyFlags == rhs.dependencyFlags); # endif } bool operator!=(SubpassDependency const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t srcSubpass = {}; uint32_t dstSubpass = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassDependency) == sizeof(VkSubpassDependency), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassDependency is not nothrow_move_constructible!"); struct RenderPassCreateInfo { using NativeType = VkRenderPassCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription *pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription *pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency *pDependencies_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), attachmentCount(attachmentCount_), pAttachments(pAttachments_), subpassCount(subpassCount_), pSubpasses(pSubpasses_), dependencyCount(dependencyCount_), pDependencies(pDependencies_) { } VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(RenderPassCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassCreateInfo(VkRenderPassCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &dependencies_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , attachmentCount(static_cast(attachments_.size())) , pAttachments(attachments_.data()) , subpassCount(static_cast(subpasses_.size())) , pSubpasses(subpasses_.data()) , dependencyCount(static_cast(dependencies_.size())) , pDependencies(dependencies_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif 
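  // Editorial usage sketch for SubpassDependency above (comment only): the common "external to subpass 0"
  // dependency that orders color-attachment output against a swapchain acquire. Values are illustrative:
  //
  //   vk::SubpassDependency dependency( VK_SUBPASS_EXTERNAL,                               // srcSubpass
  //                                     0,                                                 // dstSubpass
  //                                     vk::PipelineStageFlagBits::eColorAttachmentOutput, // srcStageMask
  //                                     vk::PipelineStageFlagBits::eColorAttachmentOutput, // dstStageMask
  //                                     {},                                                // srcAccessMask
  //                                     vk::AccessFlagBits::eColorAttachmentWrite );       // dstAccessMask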
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassCreateInfo &operator=(RenderPassCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassCreateInfo &operator=(VkRenderPassCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setAttachmentCount(uint32_t attachmentCount_) VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setPAttachments(const VULKAN_HPP_NAMESPACE::AttachmentDescription *pAttachments_) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo &setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast(attachments_.size()); pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setSubpassCount(uint32_t subpassCount_) VULKAN_HPP_NOEXCEPT { subpassCount = subpassCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setPSubpasses(const VULKAN_HPP_NAMESPACE::SubpassDescription *pSubpasses_) VULKAN_HPP_NOEXCEPT { pSubpasses = pSubpasses_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo & setSubpasses(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &subpasses_) VULKAN_HPP_NOEXCEPT { subpassCount = static_cast(subpasses_.size()); pSubpasses = subpasses_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setDependencyCount(uint32_t dependencyCount_) VULKAN_HPP_NOEXCEPT { dependencyCount = dependencyCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &setPDependencies(const VULKAN_HPP_NAMESPACE::SubpassDependency *pDependencies_) VULKAN_HPP_NOEXCEPT { pDependencies = pDependencies_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo & setDependencies(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &dependencies_) VULKAN_HPP_NOEXCEPT { dependencyCount = static_cast(dependencies_.size()); pDependencies = dependencies_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassCreateInfo const &) const = default; #else bool operator==(RenderPassCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (attachmentCount == 
rhs.attachmentCount) && (pAttachments == rhs.pAttachments) && (subpassCount == rhs.subpassCount) && (pSubpasses == rhs.pSubpasses) && (dependencyCount == rhs.dependencyCount) && (pDependencies == rhs.pDependencies); # endif } bool operator!=(RenderPassCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::AttachmentDescription *pAttachments = {}; uint32_t subpassCount = {}; const VULKAN_HPP_NAMESPACE::SubpassDescription *pSubpasses = {}; uint32_t dependencyCount = {}; const VULKAN_HPP_NAMESPACE::SubpassDependency *pDependencies = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassCreateInfo) == sizeof(VkRenderPassCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassCreateInfo; }; struct SubpassDescription2 { using NativeType = VkSubpassDescription2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t *pPreserveAttachments_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), pipelineBindPoint(pipelineBindPoint_), viewMask(viewMask_), inputAttachmentCount(inputAttachmentCount_), pInputAttachments(pInputAttachments_), colorAttachmentCount(colorAttachmentCount_), pColorAttachments(pColorAttachments_), pResolveAttachments(pResolveAttachments_), pDepthStencilAttachment(pDepthStencilAttachment_), preserveAttachmentCount(preserveAttachmentCount_), pPreserveAttachments(pPreserveAttachments_) { } VULKAN_HPP_CONSTEXPR SubpassDescription2(SubpassDescription2 const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDescription2(VkSubpassDescription2 const &rhs) VULKAN_HPP_NOEXCEPT : SubpassDescription2(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &preserveAttachments_ = {}, const 
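  // Editorial usage sketch for RenderPassCreateInfo above (comment only): assembling the create info from
  // attachment descriptions, the `subpass` and the `dependency` of the earlier sketches, then creating the
  // render pass. Assumes the default `vk` namespace, enhanced mode with exceptions, a vk::Device `device`,
  // and vk::AttachmentDescription objects `colorAttachment` / `depthAttachment` filled in elsewhere
  // (illustrative names):
  //
  //   std::array<vk::AttachmentDescription, 2> attachments = { colorAttachment, depthAttachment };
  //   vk::RenderPassCreateInfo createInfo( {}, attachments, subpass, dependency );
  //   vk::RenderPass renderPass = device.createRenderPass( createInfo );
  //   // ... later: device.destroyRenderPass( renderPass );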
void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , pipelineBindPoint(pipelineBindPoint_) , viewMask(viewMask_) , inputAttachmentCount(static_cast(inputAttachments_.size())) , pInputAttachments(inputAttachments_.data()) , colorAttachmentCount(static_cast(colorAttachments_.size())) , pColorAttachments(colorAttachments_.data()) , pResolveAttachments(resolveAttachments_.data()) , pDepthStencilAttachment(pDepthStencilAttachment_) , preserveAttachmentCount(static_cast(preserveAttachments_.size())) , pPreserveAttachments(preserveAttachments_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(resolveAttachments_.empty() || (colorAttachments_.size() == resolveAttachments_.size())); # else if(!resolveAttachments_.empty() && (colorAttachments_.size() != resolveAttachments_.size())) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassDescription2 &operator=(SubpassDescription2 const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDescription2 &operator=(VkSubpassDescription2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setFlags(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setPipelineBindPoint(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_) VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setViewMask(uint32_t viewMask_) VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setInputAttachmentCount(uint32_t inputAttachmentCount_) VULKAN_HPP_NOEXCEPT { inputAttachmentCount = inputAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPInputAttachments(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pInputAttachments_) VULKAN_HPP_NOEXCEPT { pInputAttachments = pInputAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription2 &setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &inputAttachments_) VULKAN_HPP_NOEXCEPT { inputAttachmentCount = static_cast(inputAttachments_.size()); pInputAttachments = inputAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setColorAttachmentCount(uint32_t colorAttachmentCount_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPColorAttachments(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pColorAttachments_) VULKAN_HPP_NOEXCEPT { pColorAttachments = pColorAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription2 &setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachments_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(colorAttachments_.size()); pColorAttachments = colorAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 
SubpassDescription2 & setPResolveAttachments(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pResolveAttachments_) VULKAN_HPP_NOEXCEPT { pResolveAttachments = pResolveAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription2 &setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &resolveAttachments_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(resolveAttachments_.size()); pResolveAttachments = resolveAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPDepthStencilAttachment(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilAttachment_) VULKAN_HPP_NOEXCEPT { pDepthStencilAttachment = pDepthStencilAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setPreserveAttachmentCount(uint32_t preserveAttachmentCount_) VULKAN_HPP_NOEXCEPT { preserveAttachmentCount = preserveAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &setPPreserveAttachments(const uint32_t *pPreserveAttachments_) VULKAN_HPP_NOEXCEPT { pPreserveAttachments = pPreserveAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassDescription2 & setPreserveAttachments(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &preserveAttachments_) VULKAN_HPP_NOEXCEPT { preserveAttachmentCount = static_cast(preserveAttachments_.size()); pPreserveAttachments = preserveAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, pipelineBindPoint, viewMask, inputAttachmentCount, pInputAttachments, colorAttachmentCount, pColorAttachments, pResolveAttachments, pDepthStencilAttachment, preserveAttachmentCount, pPreserveAttachments); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassDescription2 const &) const = default; #else bool operator==(SubpassDescription2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (pipelineBindPoint == rhs.pipelineBindPoint) && (viewMask == rhs.viewMask) && (inputAttachmentCount == rhs.inputAttachmentCount) && (pInputAttachments == rhs.pInputAttachments) && (colorAttachmentCount == rhs.colorAttachmentCount) && (pColorAttachments == rhs.pColorAttachments) && (pResolveAttachments == rhs.pResolveAttachments) && (pDepthStencilAttachment == rhs.pDepthStencilAttachment) && (preserveAttachmentCount == rhs.preserveAttachmentCount) && (pPreserveAttachments == rhs.pPreserveAttachments); # endif } bool operator!=(SubpassDescription2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; uint32_t viewMask = {}; uint32_t inputAttachmentCount = {}; const 
VULKAN_HPP_NAMESPACE::AttachmentReference2 *pInputAttachments = {}; uint32_t colorAttachmentCount = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pColorAttachments = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pResolveAttachments = {}; const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilAttachment = {}; uint32_t preserveAttachmentCount = {}; const uint32_t *pPreserveAttachments = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassDescription2) == sizeof(VkSubpassDescription2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassDescription2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassDescription2; }; using SubpassDescription2KHR = SubpassDescription2; struct SubpassDependency2 { using NativeType = VkSubpassDependency2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcSubpass(srcSubpass_), dstSubpass(dstSubpass_), srcStageMask(srcStageMask_), dstStageMask(dstStageMask_), srcAccessMask(srcAccessMask_), dstAccessMask(dstAccessMask_), dependencyFlags(dependencyFlags_), viewOffset(viewOffset_) { } VULKAN_HPP_CONSTEXPR SubpassDependency2(SubpassDependency2 const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDependency2(VkSubpassDependency2 const &rhs) VULKAN_HPP_NOEXCEPT : SubpassDependency2(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassDependency2 &operator=(SubpassDependency2 const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDependency2 &operator=(VkSubpassDependency2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setSrcSubpass(uint32_t srcSubpass_) VULKAN_HPP_NOEXCEPT { srcSubpass = srcSubpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setDstSubpass(uint32_t dstSubpass_) VULKAN_HPP_NOEXCEPT { dstSubpass = dstSubpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setSrcStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_) VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setDstStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_) VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setSrcAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_) VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setDstAccessMask(VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_) VULKAN_HPP_NOEXCEPT { dstAccessMask = 
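  // Editorial usage sketch for SubpassDescription2 above (comment only): it mirrors SubpassDescription but is
  // extensible (sType/pNext) and adds viewMask for multiview. A sketch of a graphics subpass that renders to
  // views 0 and 1, with illustrative values:
  //
  //   vk::AttachmentReference2 colorRef2( 0, vk::ImageLayout::eColorAttachmentOptimal, vk::ImageAspectFlagBits::eColor );
  //
  //   vk::SubpassDescription2 subpass2( {},                                // flags
  //                                     vk::PipelineBindPoint::eGraphics,
  //                                     0b11,                              // viewMask: views 0 and 1
  //                                     {},                                // no input attachments
  //                                     colorRef2 );                       // one color attachment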
dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setDependencyFlags(VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_) VULKAN_HPP_NOEXCEPT { dependencyFlags = dependencyFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &setViewOffset(int32_t viewOffset_) VULKAN_HPP_NOEXCEPT { viewOffset = viewOffset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassDependency2 const &) const = default; #else bool operator==(SubpassDependency2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcSubpass == rhs.srcSubpass) && (dstSubpass == rhs.dstSubpass) && (srcStageMask == rhs.srcStageMask) && (dstStageMask == rhs.dstStageMask) && (srcAccessMask == rhs.srcAccessMask) && (dstAccessMask == rhs.dstAccessMask) && (dependencyFlags == rhs.dependencyFlags) && (viewOffset == rhs.viewOffset); # endif } bool operator!=(SubpassDependency2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; const void *pNext = {}; uint32_t srcSubpass = {}; uint32_t dstSubpass = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; int32_t viewOffset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassDependency2) == sizeof(VkSubpassDependency2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassDependency2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassDependency2; }; using SubpassDependency2KHR = SubpassDependency2; struct RenderPassCreateInfo2 { using NativeType = VkRenderPassCreateInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2 *pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2 *pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2 *pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t *pCorrelatedViewMasks_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), attachmentCount(attachmentCount_), pAttachments(pAttachments_), 
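  // Editorial usage sketch for SubpassDependency2 (comment only): it adds viewOffset (used with multiview)
  // and a pNext chain on top of SubpassDependency. The equivalent of the earlier external -> 0 dependency:
  //
  //   vk::SubpassDependency2 dependency2( VK_SUBPASS_EXTERNAL, 0,
  //                                       vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                       vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                       {}, vk::AccessFlagBits::eColorAttachmentWrite,
  //                                       {},                              // dependencyFlags
  //                                       0 );                             // viewOffset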
subpassCount(subpassCount_), pSubpasses(pSubpasses_), dependencyCount(dependencyCount_), pDependencies(pDependencies_), correlatedViewMaskCount(correlatedViewMaskCount_), pCorrelatedViewMasks(pCorrelatedViewMasks_) { } VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(RenderPassCreateInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassCreateInfo2(VkRenderPassCreateInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassCreateInfo2(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &correlatedViewMasks_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , attachmentCount(static_cast(attachments_.size())) , pAttachments(attachments_.data()) , subpassCount(static_cast(subpasses_.size())) , pSubpasses(subpasses_.data()) , dependencyCount(static_cast(dependencies_.size())) , pDependencies(dependencies_.data()) , correlatedViewMaskCount(static_cast(correlatedViewMasks_.size())) , pCorrelatedViewMasks(correlatedViewMasks_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassCreateInfo2 &operator=(RenderPassCreateInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassCreateInfo2 &operator=(VkRenderPassCreateInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setFlags(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setAttachmentCount(uint32_t attachmentCount_) VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setPAttachments(const VULKAN_HPP_NAMESPACE::AttachmentDescription2 *pAttachments_) VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo2 &setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachments_) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast(attachments_.size()); pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setSubpassCount(uint32_t subpassCount_) VULKAN_HPP_NOEXCEPT { subpassCount = subpassCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setPSubpasses(const VULKAN_HPP_NAMESPACE::SubpassDescription2 *pSubpasses_) VULKAN_HPP_NOEXCEPT { pSubpasses = pSubpasses_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo2 & setSubpasses(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &subpasses_) VULKAN_HPP_NOEXCEPT { subpassCount = static_cast(subpasses_.size()); pSubpasses = subpasses_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setDependencyCount(uint32_t dependencyCount_) VULKAN_HPP_NOEXCEPT { dependencyCount = dependencyCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 
&setPDependencies(const VULKAN_HPP_NAMESPACE::SubpassDependency2 *pDependencies_) VULKAN_HPP_NOEXCEPT { pDependencies = pDependencies_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo2 & setDependencies(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &dependencies_) VULKAN_HPP_NOEXCEPT { dependencyCount = static_cast(dependencies_.size()); pDependencies = dependencies_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setCorrelatedViewMaskCount(uint32_t correlatedViewMaskCount_) VULKAN_HPP_NOEXCEPT { correlatedViewMaskCount = correlatedViewMaskCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &setPCorrelatedViewMasks(const uint32_t *pCorrelatedViewMasks_) VULKAN_HPP_NOEXCEPT { pCorrelatedViewMasks = pCorrelatedViewMasks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassCreateInfo2 & setCorrelatedViewMasks(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &correlatedViewMasks_) VULKAN_HPP_NOEXCEPT { correlatedViewMaskCount = static_cast(correlatedViewMasks_.size()); pCorrelatedViewMasks = correlatedViewMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies, correlatedViewMaskCount, pCorrelatedViewMasks); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassCreateInfo2 const &) const = default; #else bool operator==(RenderPassCreateInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (attachmentCount == rhs.attachmentCount) && (pAttachments == rhs.pAttachments) && (subpassCount == rhs.subpassCount) && (pSubpasses == rhs.pSubpasses) && (dependencyCount == rhs.dependencyCount) && (pDependencies == rhs.pDependencies) && (correlatedViewMaskCount == rhs.correlatedViewMaskCount) && (pCorrelatedViewMasks == rhs.pCorrelatedViewMasks); # endif } bool operator!=(RenderPassCreateInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; uint32_t attachmentCount = {}; const VULKAN_HPP_NAMESPACE::AttachmentDescription2 *pAttachments = {}; uint32_t subpassCount = {}; const VULKAN_HPP_NAMESPACE::SubpassDescription2 *pSubpasses = {}; uint32_t dependencyCount = {}; const VULKAN_HPP_NAMESPACE::SubpassDependency2 *pDependencies = {}; uint32_t correlatedViewMaskCount = {}; const uint32_t *pCorrelatedViewMasks = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2) == sizeof(VkRenderPassCreateInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, 
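  // Editorial usage sketch for RenderPassCreateInfo2 (comment only): creating a render pass through the
  // Vulkan 1.2 / KHR "2" path, including correlated view masks for multiview. Assumes the default `vk`
  // namespace, enhanced mode with exceptions, a vk::Device `device`, an AttachmentDescription2
  // `colorAttachment2`, and the `subpass2` / `dependency2` objects from the sketches above (illustrative names):
  //
  //   std::array<uint32_t, 1> correlatedViewMasks = { 0b11 };
  //   vk::RenderPassCreateInfo2 createInfo2( {}, colorAttachment2, subpass2, dependency2, correlatedViewMasks );
  //   vk::RenderPass renderPass = device.createRenderPass2( createInfo2 );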
"RenderPassCreateInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassCreateInfo2; }; using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; struct RenderPassFragmentDensityMapCreateInfoEXT { using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentDensityMapAttachment(fragmentDensityMapAttachment_) { } VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(RenderPassFragmentDensityMapCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassFragmentDensityMapCreateInfoEXT(VkRenderPassFragmentDensityMapCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassFragmentDensityMapCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassFragmentDensityMapCreateInfoEXT &operator=(RenderPassFragmentDensityMapCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassFragmentDensityMapCreateInfoEXT &operator=(VkRenderPassFragmentDensityMapCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment(VULKAN_HPP_NAMESPACE::AttachmentReference const &fragmentDensityMapAttachment_) VULKAN_HPP_NOEXCEPT { fragmentDensityMapAttachment = fragmentDensityMapAttachment_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentDensityMapAttachment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassFragmentDensityMapCreateInfoEXT const &) const = default; #else bool operator==(RenderPassFragmentDensityMapCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment); # endif } bool operator!=(RenderPassFragmentDensityMapCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT) == sizeof(VkRenderPassFragmentDensityMapCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a 
standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassFragmentDensityMapCreateInfoEXT; }; struct RenderPassInputAttachmentAspectCreateInfo { using NativeType = VkRenderPassInputAttachmentAspectCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference *pAspectReferences_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), aspectReferenceCount(aspectReferenceCount_), pAspectReferences(pAspectReferences_) { } VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(RenderPassInputAttachmentAspectCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassInputAttachmentAspectCreateInfo(VkRenderPassInputAttachmentAspectCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassInputAttachmentAspectCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &aspectReferences_, const void *pNext_ = nullptr) : pNext(pNext_) , aspectReferenceCount(static_cast(aspectReferences_.size())) , pAspectReferences(aspectReferences_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassInputAttachmentAspectCreateInfo &operator=(RenderPassInputAttachmentAspectCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassInputAttachmentAspectCreateInfo &operator=(VkRenderPassInputAttachmentAspectCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &setAspectReferenceCount(uint32_t aspectReferenceCount_) VULKAN_HPP_NOEXCEPT { aspectReferenceCount = aspectReferenceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences(const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference *pAspectReferences_) VULKAN_HPP_NOEXCEPT { pAspectReferences = pAspectReferences_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassInputAttachmentAspectCreateInfo &setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &aspectReferences_) VULKAN_HPP_NOEXCEPT { aspectReferenceCount = static_cast(aspectReferences_.size()); pAspectReferences = aspectReferences_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, aspectReferenceCount, pAspectReferences); } #endif #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassInputAttachmentAspectCreateInfo const &) const = default; #else bool operator==(RenderPassInputAttachmentAspectCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (aspectReferenceCount == rhs.aspectReferenceCount) && (pAspectReferences == rhs.pAspectReferences); # endif } bool operator!=(RenderPassInputAttachmentAspectCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; const void *pNext = {}; uint32_t aspectReferenceCount = {}; const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference *pAspectReferences = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo) == sizeof(VkRenderPassInputAttachmentAspectCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassInputAttachmentAspectCreateInfo; }; using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; struct RenderPassMultiviewCreateInfo { using NativeType = VkRenderPassMultiviewCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t *pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t *pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t *pCorrelationMasks_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), subpassCount(subpassCount_), pViewMasks(pViewMasks_), dependencyCount(dependencyCount_), pViewOffsets(pViewOffsets_), correlationMaskCount(correlationMaskCount_), pCorrelationMasks(pCorrelationMasks_) { } VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(RenderPassMultiviewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassMultiviewCreateInfo(VkRenderPassMultiviewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassMultiviewCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassMultiviewCreateInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &correlationMasks_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , subpassCount(static_cast(viewMasks_.size())) , pViewMasks(viewMasks_.data()) , dependencyCount(static_cast(viewOffsets_.size())) , pViewOffsets(viewOffsets_.data()) , correlationMaskCount(static_cast(correlationMasks_.size())) , pCorrelationMasks(correlationMasks_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassMultiviewCreateInfo &operator=(RenderPassMultiviewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassMultiviewCreateInfo &operator=(VkRenderPassMultiviewCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = 
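  // Editorial usage sketch for RenderPassInputAttachmentAspectCreateInfo above (comment only): it is chained
  // into RenderPassCreateInfo::pNext to state which image aspect(s) of an input attachment a subpass reads.
  // Illustrative values: subpass 0 reads only the depth aspect of its input attachment 0:
  //
  //   vk::InputAttachmentAspectReference aspectRef( 0, 0, vk::ImageAspectFlagBits::eDepth );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( aspectRef );
  //   // renderPassCreateInfo.pNext = &aspectInfo;   // before device.createRenderPass( renderPassCreateInfo )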
*reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setSubpassCount(uint32_t subpassCount_) VULKAN_HPP_NOEXCEPT { subpassCount = subpassCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setPViewMasks(const uint32_t *pViewMasks_) VULKAN_HPP_NOEXCEPT { pViewMasks = pViewMasks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassMultiviewCreateInfo &setViewMasks(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewMasks_) VULKAN_HPP_NOEXCEPT { subpassCount = static_cast(viewMasks_.size()); pViewMasks = viewMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setDependencyCount(uint32_t dependencyCount_) VULKAN_HPP_NOEXCEPT { dependencyCount = dependencyCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setPViewOffsets(const int32_t *pViewOffsets_) VULKAN_HPP_NOEXCEPT { pViewOffsets = pViewOffsets_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassMultiviewCreateInfo &setViewOffsets(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &viewOffsets_) VULKAN_HPP_NOEXCEPT { dependencyCount = static_cast(viewOffsets_.size()); pViewOffsets = viewOffsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setCorrelationMaskCount(uint32_t correlationMaskCount_) VULKAN_HPP_NOEXCEPT { correlationMaskCount = correlationMaskCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &setPCorrelationMasks(const uint32_t *pCorrelationMasks_) VULKAN_HPP_NOEXCEPT { pCorrelationMasks = pCorrelationMasks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassMultiviewCreateInfo & setCorrelationMasks(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &correlationMasks_) VULKAN_HPP_NOEXCEPT { correlationMaskCount = static_cast(correlationMasks_.size()); pCorrelationMasks = correlationMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassMultiviewCreateInfo const &) const = default; #else bool operator==(RenderPassMultiviewCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (subpassCount == rhs.subpassCount) && (pViewMasks == rhs.pViewMasks) && (dependencyCount == rhs.dependencyCount) && (pViewOffsets == rhs.pViewOffsets) && (correlationMaskCount == rhs.correlationMaskCount) && (pCorrelationMasks == rhs.pCorrelationMasks); # endif } bool operator!=(RenderPassMultiviewCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return 
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; const void *pNext = {}; uint32_t subpassCount = {}; const uint32_t *pViewMasks = {}; uint32_t dependencyCount = {}; const int32_t *pViewOffsets = {}; uint32_t correlationMaskCount = {}; const uint32_t *pCorrelationMasks = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo) == sizeof(VkRenderPassMultiviewCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassMultiviewCreateInfo; }; using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; struct SubpassSampleLocationsEXT { using NativeType = VkSubpassSampleLocationsEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT : subpassIndex(subpassIndex_), sampleLocationsInfo(sampleLocationsInfo_) { } VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(SubpassSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassSampleLocationsEXT(VkSubpassSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT : SubpassSampleLocationsEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassSampleLocationsEXT &operator=(SubpassSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassSampleLocationsEXT &operator=(VkSubpassSampleLocationsEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT &setSubpassIndex(uint32_t subpassIndex_) VULKAN_HPP_NOEXCEPT { subpassIndex = subpassIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSampleLocationsInfo(VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &sampleLocationsInfo_) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(subpassIndex, sampleLocationsInfo); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassSampleLocationsEXT const &) const = default; #else bool operator==(SubpassSampleLocationsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (subpassIndex == rhs.subpassIndex) && (sampleLocationsInfo == rhs.sampleLocationsInfo); # endif } bool operator!=(SubpassSampleLocationsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t subpassIndex = {}; VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT) == sizeof(VkSubpassSampleLocationsEXT), "struct and wrapper have different size!"); 
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassSampleLocationsEXT is not nothrow_move_constructible!"); struct RenderPassSampleLocationsBeginInfoEXT { using NativeType = VkRenderPassSampleLocationsBeginInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT *pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *pPostSubpassSampleLocations_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), attachmentInitialSampleLocationsCount(attachmentInitialSampleLocationsCount_), pAttachmentInitialSampleLocations(pAttachmentInitialSampleLocations_), postSubpassSampleLocationsCount(postSubpassSampleLocationsCount_), pPostSubpassSampleLocations(pPostSubpassSampleLocations_) { } VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(RenderPassSampleLocationsBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassSampleLocationsBeginInfoEXT(VkRenderPassSampleLocationsBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassSampleLocationsBeginInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &postSubpassSampleLocations_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , attachmentInitialSampleLocationsCount(static_cast(attachmentInitialSampleLocations_.size())) , pAttachmentInitialSampleLocations(attachmentInitialSampleLocations_.data()) , postSubpassSampleLocationsCount(static_cast(postSubpassSampleLocations_.size())) , pPostSubpassSampleLocations(postSubpassSampleLocations_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassSampleLocationsBeginInfoEXT &operator=(RenderPassSampleLocationsBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassSampleLocationsBeginInfoEXT &operator=(VkRenderPassSampleLocationsBeginInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount(uint32_t attachmentInitialSampleLocationsCount_) VULKAN_HPP_NOEXCEPT { attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations(const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT *pAttachmentInitialSampleLocations_) VULKAN_HPP_NOEXCEPT { pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassSampleLocationsBeginInfoEXT &setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const 
&attachmentInitialSampleLocations_) VULKAN_HPP_NOEXCEPT { attachmentInitialSampleLocationsCount = static_cast(attachmentInitialSampleLocations_.size()); pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount(uint32_t postSubpassSampleLocationsCount_) VULKAN_HPP_NOEXCEPT { postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations(const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *pPostSubpassSampleLocations_) VULKAN_HPP_NOEXCEPT { pPostSubpassSampleLocations = pPostSubpassSampleLocations_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderPassSampleLocationsBeginInfoEXT &setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &postSubpassSampleLocations_) VULKAN_HPP_NOEXCEPT { postSubpassSampleLocationsCount = static_cast(postSubpassSampleLocations_.size()); pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassSampleLocationsBeginInfoEXT const &) const = default; #else bool operator==(RenderPassSampleLocationsBeginInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount) && (pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations) && (postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount) && (pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations); # endif } bool operator!=(RenderPassSampleLocationsBeginInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; const void *pNext = {}; uint32_t attachmentInitialSampleLocationsCount = {}; const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT *pAttachmentInitialSampleLocations = {}; uint32_t postSubpassSampleLocationsCount = {}; const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *pPostSubpassSampleLocations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT) == sizeof(VkRenderPassSampleLocationsBeginInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { 
using Type = RenderPassSampleLocationsBeginInfoEXT; }; struct RenderPassTransformBeginInfoQCOM { using NativeType = VkRenderPassTransformBeginInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), transform(transform_) { } VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(RenderPassTransformBeginInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassTransformBeginInfoQCOM(VkRenderPassTransformBeginInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT : RenderPassTransformBeginInfoQCOM(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderPassTransformBeginInfoQCOM &operator=(RenderPassTransformBeginInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderPassTransformBeginInfoQCOM &operator=(VkRenderPassTransformBeginInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM &setTransform(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_) VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, transform); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderPassTransformBeginInfoQCOM const &) const = default; #else bool operator==(RenderPassTransformBeginInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (transform == rhs.transform); # endif } bool operator!=(RenderPassTransformBeginInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; void *pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM) == sizeof(VkRenderPassTransformBeginInfoQCOM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderPassTransformBeginInfoQCOM; }; struct RenderingAttachmentInfo { using NativeType = VkRenderingAttachmentInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eRenderingAttachmentInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageView(imageView_), imageLayout(imageLayout_), resolveMode(resolveMode_), resolveImageView(resolveImageView_), resolveImageLayout(resolveImageLayout_), loadOp(loadOp_), storeOp(storeOp_), clearValue(clearValue_) { } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo(RenderingAttachmentInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingAttachmentInfo(VkRenderingAttachmentInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderingAttachmentInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderingAttachmentInfo &operator=(RenderingAttachmentInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingAttachmentInfo &operator=(VkRenderingAttachmentInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setImageView(VULKAN_HPP_NAMESPACE::ImageView imageView_) VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_) VULKAN_HPP_NOEXCEPT { imageLayout = imageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setResolveMode(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_) VULKAN_HPP_NOEXCEPT { resolveMode = resolveMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setResolveImageView(VULKAN_HPP_NAMESPACE::ImageView resolveImageView_) VULKAN_HPP_NOEXCEPT { resolveImageView = resolveImageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setResolveImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_) VULKAN_HPP_NOEXCEPT { resolveImageLayout = resolveImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setLoadOp(VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_) VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setStoreOp(VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_) VULKAN_HPP_NOEXCEPT { storeOp = storeOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo &setClearValue(VULKAN_HPP_NAMESPACE::ClearValue const &clearValue_) VULKAN_HPP_NOEXCEPT { clearValue = clearValue_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= 
VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {}; VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo) == sizeof(VkRenderingAttachmentInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderingAttachmentInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderingAttachmentInfo; }; using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; struct RenderingFragmentDensityMapAttachmentInfoEXT { using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageView(imageView_), imageLayout(imageLayout_) { } VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT(RenderingFragmentDensityMapAttachmentInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingFragmentDensityMapAttachmentInfoEXT(VkRenderingFragmentDensityMapAttachmentInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : RenderingFragmentDensityMapAttachmentInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderingFragmentDensityMapAttachmentInfoEXT &operator=(RenderingFragmentDensityMapAttachmentInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingFragmentDensityMapAttachmentInfoEXT &operator=(VkRenderingFragmentDensityMapAttachmentInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT &setImageView(VULKAN_HPP_NAMESPACE::ImageView imageView_) VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT &setImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_) VULKAN_HPP_NOEXCEPT { imageLayout = imageLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ 
explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageView, imageLayout); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderingFragmentDensityMapAttachmentInfoEXT const &) const = default; #else bool operator==(RenderingFragmentDensityMapAttachmentInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageView == rhs.imageView) && (imageLayout == rhs.imageLayout); # endif } bool operator!=(RenderingFragmentDensityMapAttachmentInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT) == sizeof(VkRenderingFragmentDensityMapAttachmentInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderingFragmentDensityMapAttachmentInfoEXT; }; struct RenderingFragmentShadingRateAttachmentInfoKHR { using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), imageView(imageView_), imageLayout(imageLayout_), shadingRateAttachmentTexelSize(shadingRateAttachmentTexelSize_) { } VULKAN_HPP_CONSTEXPR RenderingFragmentShadingRateAttachmentInfoKHR(RenderingFragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingFragmentShadingRateAttachmentInfoKHR(VkRenderingFragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : RenderingFragmentShadingRateAttachmentInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderingFragmentShadingRateAttachmentInfoKHR &operator=(RenderingFragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingFragmentShadingRateAttachmentInfoKHR &operator=(VkRenderingFragmentShadingRateAttachmentInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR 
&setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &setImageView(VULKAN_HPP_NAMESPACE::ImageView imageView_) VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &setImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_) VULKAN_HPP_NOEXCEPT { imageLayout = imageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize(VULKAN_HPP_NAMESPACE::Extent2D const &shadingRateAttachmentTexelSize_) VULKAN_HPP_NOEXCEPT { shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderingFragmentShadingRateAttachmentInfoKHR const &) const = default; #else bool operator==(RenderingFragmentShadingRateAttachmentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (imageView == rhs.imageView) && (imageLayout == rhs.imageLayout) && (shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize); # endif } bool operator!=(RenderingFragmentShadingRateAttachmentInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageView imageView = {}; VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR) == sizeof(VkRenderingFragmentShadingRateAttachmentInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderingFragmentShadingRateAttachmentInfoKHR; }; struct RenderingInfo { using NativeType = VkRenderingInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 RenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t layerCount_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo 
*pStencilAttachment_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), renderArea(renderArea_), layerCount(layerCount_), viewMask(viewMask_), colorAttachmentCount(colorAttachmentCount_), pColorAttachments(pColorAttachments_), pDepthAttachment(pDepthAttachment_), pStencilAttachment(pStencilAttachment_) { } VULKAN_HPP_CONSTEXPR_14 RenderingInfo(RenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingInfo(VkRenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT : RenderingInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderingInfo(VULKAN_HPP_NAMESPACE::RenderingFlags flags_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, uint32_t layerCount_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachments_, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pStencilAttachment_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , renderArea(renderArea_) , layerCount(layerCount_) , viewMask(viewMask_) , colorAttachmentCount(static_cast(colorAttachments_.size())) , pColorAttachments(colorAttachments_.data()) , pDepthAttachment(pDepthAttachment_) , pStencilAttachment(pStencilAttachment_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ RenderingInfo &operator=(RenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; RenderingInfo &operator=(VkRenderingInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setFlags(VULKAN_HPP_NAMESPACE::RenderingFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setRenderArea(VULKAN_HPP_NAMESPACE::Rect2D const &renderArea_) VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setLayerCount(uint32_t layerCount_) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setViewMask(uint32_t viewMask_) VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setColorAttachmentCount(uint32_t colorAttachmentCount_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setPColorAttachments(const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pColorAttachments_) VULKAN_HPP_NOEXCEPT { pColorAttachments = pColorAttachments_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) RenderingInfo &setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &colorAttachments_) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast(colorAttachments_.size()); pColorAttachments = colorAttachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 RenderingInfo &setPDepthAttachment(const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pDepthAttachment_) VULKAN_HPP_NOEXCEPT { pDepthAttachment = pDepthAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPStencilAttachment(const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pStencilAttachment_) VULKAN_HPP_NOEXCEPT { pStencilAttachment = pStencilAttachment_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator 
VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(RenderingInfo const &) const = default; #else bool operator==(RenderingInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (renderArea == rhs.renderArea) && (layerCount == rhs.layerCount) && (viewMask == rhs.viewMask) && (colorAttachmentCount == rhs.colorAttachmentCount) && (pColorAttachments == rhs.pColorAttachments) && (pDepthAttachment == rhs.pDepthAttachment) && (pStencilAttachment == rhs.pStencilAttachment); # endif } bool operator!=(RenderingInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; uint32_t layerCount = {}; uint32_t viewMask = {}; uint32_t colorAttachmentCount = {}; const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pColorAttachments = {}; const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pDepthAttachment = {}; const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo *pStencilAttachment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::RenderingInfo) == sizeof(VkRenderingInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "RenderingInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = RenderingInfo; }; using RenderingInfoKHR = RenderingInfo; struct ResolveImageInfo2 { using NativeType = VkResolveImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ResolveImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2 *pRegions_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), srcImage(srcImage_), srcImageLayout(srcImageLayout_), dstImage(dstImage_), dstImageLayout(dstImageLayout_), regionCount(regionCount_), pRegions(pRegions_) { } VULKAN_HPP_CONSTEXPR ResolveImageInfo2(ResolveImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ResolveImageInfo2(VkResolveImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ResolveImageInfo2(VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, 
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_, const void *pNext_ = nullptr) : pNext(pNext_) , srcImage(srcImage_) , srcImageLayout(srcImageLayout_) , dstImage(dstImage_) , dstImageLayout(dstImageLayout_) , regionCount(static_cast(regions_.size())) , pRegions(regions_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ResolveImageInfo2 &operator=(ResolveImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; ResolveImageInfo2 &operator=(VkResolveImageInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setSrcImage(VULKAN_HPP_NAMESPACE::Image srcImage_) VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setSrcImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_) VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setDstImage(VULKAN_HPP_NAMESPACE::Image dstImage_) VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setDstImageLayout(VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_) VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setRegionCount(uint32_t regionCount_) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 &setPRegions(const VULKAN_HPP_NAMESPACE::ImageResolve2 *pRegions_) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ResolveImageInfo2 & setRegions(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const ®ions_) VULKAN_HPP_NOEXCEPT { regionCount = static_cast(regions_.size()); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ResolveImageInfo2 const &) const = default; #else bool operator==(ResolveImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (srcImage == rhs.srcImage) && (srcImageLayout == rhs.srcImageLayout) && (dstImage == rhs.dstImage) && (dstImageLayout == rhs.dstImageLayout) && (regionCount == rhs.regionCount) && (pRegions == rhs.pRegions); # endif } bool operator!=(ResolveImageInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Image srcImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; VULKAN_HPP_NAMESPACE::Image 
dstImage = {}; VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; uint32_t regionCount = {}; const VULKAN_HPP_NAMESPACE::ImageResolve2 *pRegions = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ResolveImageInfo2) == sizeof(VkResolveImageInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ResolveImageInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = ResolveImageInfo2; }; using ResolveImageInfo2KHR = ResolveImageInfo2; struct SamplerBorderColorComponentMappingCreateInfoEXT { using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT(VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), components(components_), srgb(srgb_) { } VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT(SamplerBorderColorComponentMappingCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerBorderColorComponentMappingCreateInfoEXT(VkSamplerBorderColorComponentMappingCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : SamplerBorderColorComponentMappingCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerBorderColorComponentMappingCreateInfoEXT &operator=(SamplerBorderColorComponentMappingCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerBorderColorComponentMappingCreateInfoEXT &operator=(VkSamplerBorderColorComponentMappingCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setComponents(VULKAN_HPP_NAMESPACE::ComponentMapping const &components_) VULKAN_HPP_NOEXCEPT { components = components_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT &setSrgb(VULKAN_HPP_NAMESPACE::Bool32 srgb_) VULKAN_HPP_NOEXCEPT { srgb = srgb_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, components, srgb); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SamplerBorderColorComponentMappingCreateInfoEXT const &) const = default; #else bool operator==(SamplerBorderColorComponentMappingCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (components == 
rhs.components) && (srgb == rhs.srgb); # endif } bool operator!=(SamplerBorderColorComponentMappingCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; VULKAN_HPP_NAMESPACE::Bool32 srgb = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT) == sizeof(VkSamplerBorderColorComponentMappingCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerBorderColorComponentMappingCreateInfoEXT; }; struct SamplerCreateInfo { using NativeType = VkSamplerCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), magFilter(magFilter_), minFilter(minFilter_), mipmapMode(mipmapMode_), addressModeU(addressModeU_), addressModeV(addressModeV_), addressModeW(addressModeW_), mipLodBias(mipLodBias_), anisotropyEnable(anisotropyEnable_), maxAnisotropy(maxAnisotropy_), compareEnable(compareEnable_), compareOp(compareOp_), minLod(minLod_), maxLod(maxLod_), borderColor(borderColor_), unnormalizedCoordinates(unnormalizedCoordinates_) { } VULKAN_HPP_CONSTEXPR SamplerCreateInfo(SamplerCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerCreateInfo(VkSamplerCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerCreateInfo &operator=(SamplerCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerCreateInfo &operator=(VkSamplerCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMagFilter(VULKAN_HPP_NAMESPACE::Filter magFilter_) VULKAN_HPP_NOEXCEPT { magFilter = magFilter_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMinFilter(VULKAN_HPP_NAMESPACE::Filter minFilter_) VULKAN_HPP_NOEXCEPT { minFilter = minFilter_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMipmapMode(VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_) VULKAN_HPP_NOEXCEPT { mipmapMode = mipmapMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setAddressModeU(VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_) VULKAN_HPP_NOEXCEPT { addressModeU = addressModeU_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setAddressModeV(VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_) VULKAN_HPP_NOEXCEPT { addressModeV = addressModeV_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setAddressModeW(VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_) VULKAN_HPP_NOEXCEPT { addressModeW = addressModeW_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMipLodBias(float mipLodBias_) VULKAN_HPP_NOEXCEPT { mipLodBias = mipLodBias_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setAnisotropyEnable(VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_) VULKAN_HPP_NOEXCEPT { anisotropyEnable = anisotropyEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMaxAnisotropy(float maxAnisotropy_) VULKAN_HPP_NOEXCEPT { maxAnisotropy = maxAnisotropy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setCompareEnable(VULKAN_HPP_NAMESPACE::Bool32 compareEnable_) VULKAN_HPP_NOEXCEPT { compareEnable = compareEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setCompareOp(VULKAN_HPP_NAMESPACE::CompareOp compareOp_) VULKAN_HPP_NOEXCEPT { compareOp = compareOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMinLod(float minLod_) VULKAN_HPP_NOEXCEPT { minLod = minLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setMaxLod(float maxLod_) VULKAN_HPP_NOEXCEPT { maxLod = maxLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setBorderColor(VULKAN_HPP_NAMESPACE::BorderColor borderColor_) VULKAN_HPP_NOEXCEPT { borderColor = borderColor_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &setUnnormalizedCoordinates(VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_) VULKAN_HPP_NOEXCEPT { unnormalizedCoordinates = unnormalizedCoordinates_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, magFilter, minFilter, mipmapMode, addressModeU, addressModeV, addressModeW, mipLodBias, anisotropyEnable, maxAnisotropy, compareEnable, compareOp, minLod, maxLod, borderColor, unnormalizedCoordinates); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SamplerCreateInfo const &) const = default; #else bool operator==(SamplerCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else 
return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (magFilter == rhs.magFilter) && (minFilter == rhs.minFilter) && (mipmapMode == rhs.mipmapMode) && (addressModeU == rhs.addressModeU) && (addressModeV == rhs.addressModeV) && (addressModeW == rhs.addressModeW) && (mipLodBias == rhs.mipLodBias) && (anisotropyEnable == rhs.anisotropyEnable) && (maxAnisotropy == rhs.maxAnisotropy) && (compareEnable == rhs.compareEnable) && (compareOp == rhs.compareOp) && (minLod == rhs.minLod) && (maxLod == rhs.maxLod) && (borderColor == rhs.borderColor) && (unnormalizedCoordinates == rhs.unnormalizedCoordinates); # endif } bool operator!=(SamplerCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; float mipLodBias = {}; VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; float maxAnisotropy = {}; VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; float minLod = {}; float maxLod = {}; VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerCreateInfo) == sizeof(VkSamplerCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerCreateInfo; }; struct SamplerCustomBorderColorCreateInfoEXT { using NativeType = VkSamplerCustomBorderColorCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), customBorderColor(customBorderColor_), format(format_) { } VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT(SamplerCustomBorderColorCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerCustomBorderColorCreateInfoEXT(VkSamplerCustomBorderColorCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : SamplerCustomBorderColorCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerCustomBorderColorCreateInfoEXT &operator=(SamplerCustomBorderColorCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; 
SamplerCustomBorderColorCreateInfoEXT &operator=(VkSamplerCustomBorderColorCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor(VULKAN_HPP_NAMESPACE::ClearColorValue const &customBorderColor_) VULKAN_HPP_NOEXCEPT { customBorderColor = customBorderColor_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, customBorderColor, format); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT) == sizeof(VkSamplerCustomBorderColorCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerCustomBorderColorCreateInfoEXT; }; struct SamplerReductionModeCreateInfo { using NativeType = VkSamplerReductionModeCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), reductionMode(reductionMode_) { } VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(SamplerReductionModeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerReductionModeCreateInfo(VkSamplerReductionModeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : SamplerReductionModeCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerReductionModeCreateInfo &operator=(SamplerReductionModeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerReductionModeCreateInfo &operator=(VkSamplerReductionModeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo &setReductionMode(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_) VULKAN_HPP_NOEXCEPT 
{ reductionMode = reductionMode_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, reductionMode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SamplerReductionModeCreateInfo const &) const = default; #else bool operator==(SamplerReductionModeCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (reductionMode == rhs.reductionMode); # endif } bool operator!=(SamplerReductionModeCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo) == sizeof(VkSamplerReductionModeCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerReductionModeCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerReductionModeCreateInfo; }; using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; struct SamplerYcbcrConversionCreateInfo { using NativeType = VkSamplerYcbcrConversionCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), format(format_), ycbcrModel(ycbcrModel_), ycbcrRange(ycbcrRange_), components(components_), xChromaOffset(xChromaOffset_), yChromaOffset(yChromaOffset_), chromaFilter(chromaFilter_), forceExplicitReconstruction(forceExplicitReconstruction_) { } VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(SamplerYcbcrConversionCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionCreateInfo(VkSamplerYcbcrConversionCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : SamplerYcbcrConversionCreateInfo(*reinterpret_cast(&rhs)) { } 
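// Editorial usage sketch (not generated code): the two sampler extension structs above are
// consumed through the pNext chain of a VULKAN_HPP_NAMESPACE::SamplerCreateInfo, which is
// defined elsewhere in this header. The snippet assumes the default `vk` namespace alias, a
// valid vk::Device named `device`, and that VK_EXT_custom_border_color and
// VK_EXT_sampler_filter_minmax (or the corresponding core features) are enabled.
//
//   vk::SamplerReductionModeCreateInfo reduction{ vk::SamplerReductionMode::eMax };
//   vk::SamplerCustomBorderColorCreateInfoEXT customBorder{};
//   customBorder.setCustomBorderColor( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) )
//               .setFormat( vk::Format::eR32G32B32A32Sfloat )
//               .setPNext( &reduction );  // extension structs can be chained one after another
//   vk::SamplerCreateInfo samplerInfo{};
//   samplerInfo.setBorderColor( vk::BorderColor::eFloatCustomEXT ).setPNext( &customBorder );
//   vk::Sampler sampler = device.createSampler( samplerInfo );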
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerYcbcrConversionCreateInfo &operator=(SamplerYcbcrConversionCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionCreateInfo &operator=(VkSamplerYcbcrConversionCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrModel(VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_) VULKAN_HPP_NOEXCEPT { ycbcrModel = ycbcrModel_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setYcbcrRange(VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_) VULKAN_HPP_NOEXCEPT { ycbcrRange = ycbcrRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setComponents(VULKAN_HPP_NAMESPACE::ComponentMapping const &components_) VULKAN_HPP_NOEXCEPT { components = components_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setXChromaOffset(VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_) VULKAN_HPP_NOEXCEPT { xChromaOffset = xChromaOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setYChromaOffset(VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_) VULKAN_HPP_NOEXCEPT { yChromaOffset = yChromaOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &setChromaFilter(VULKAN_HPP_NAMESPACE::Filter chromaFilter_) VULKAN_HPP_NOEXCEPT { chromaFilter = chromaFilter_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction(VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_) VULKAN_HPP_NOEXCEPT { forceExplicitReconstruction = forceExplicitReconstruction_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SamplerYcbcrConversionCreateInfo const &) const = default; #else bool operator==(SamplerYcbcrConversionCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (format == rhs.format) && (ycbcrModel == rhs.ycbcrModel) && (ycbcrRange == rhs.ycbcrRange) && (components == rhs.components) && (xChromaOffset == rhs.xChromaOffset) && (yChromaOffset == rhs.yChromaOffset) && (chromaFilter == rhs.chromaFilter) && (forceExplicitReconstruction == rhs.forceExplicitReconstruction); # endif } bool operator!=(SamplerYcbcrConversionCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSamplerYcbcrConversionCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo) == sizeof(VkSamplerYcbcrConversionCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerYcbcrConversionCreateInfo; }; using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; struct SamplerYcbcrConversionImageFormatProperties { using NativeType = VkSamplerYcbcrConversionImageFormatProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), combinedImageSamplerDescriptorCount(combinedImageSamplerDescriptorCount_) { } VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(SamplerYcbcrConversionImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionImageFormatProperties(VkSamplerYcbcrConversionImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT : SamplerYcbcrConversionImageFormatProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerYcbcrConversionImageFormatProperties &operator=(SamplerYcbcrConversionImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionImageFormatProperties &operator=(VkSamplerYcbcrConversionImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, combinedImageSamplerDescriptorCount); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SamplerYcbcrConversionImageFormatProperties const &) const = default; #else bool operator==(SamplerYcbcrConversionImageFormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount); # endif } bool operator!=(SamplerYcbcrConversionImageFormatProperties const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; void *pNext = {}; uint32_t combinedImageSamplerDescriptorCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties) == sizeof(VkSamplerYcbcrConversionImageFormatProperties), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerYcbcrConversionImageFormatProperties; }; using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; struct SamplerYcbcrConversionInfo { using NativeType = VkSamplerYcbcrConversionInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), conversion(conversion_) { } VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(SamplerYcbcrConversionInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionInfo(VkSamplerYcbcrConversionInfo const &rhs) VULKAN_HPP_NOEXCEPT : SamplerYcbcrConversionInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SamplerYcbcrConversionInfo &operator=(SamplerYcbcrConversionInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SamplerYcbcrConversionInfo &operator=(VkSamplerYcbcrConversionInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo &setConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_) VULKAN_HPP_NOEXCEPT { conversion = conversion_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, conversion); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SamplerYcbcrConversionInfo const &) const = default; #else bool operator==(SamplerYcbcrConversionInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (conversion == rhs.conversion); # endif } bool operator!=(SamplerYcbcrConversionInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; const void *pNext 
= {}; VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo) == sizeof(VkSamplerYcbcrConversionInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SamplerYcbcrConversionInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SamplerYcbcrConversionInfo; }; using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; #if defined(VK_USE_PLATFORM_SCREEN_QNX) struct ScreenSurfaceCreateInfoQNX { using NativeType = VkScreenSurfaceCreateInfoQNX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, struct _screen_context *context_ = {}, struct _screen_window *window_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), context(context_), window(window_) { } VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX(ScreenSurfaceCreateInfoQNX const &rhs) VULKAN_HPP_NOEXCEPT = default; ScreenSurfaceCreateInfoQNX(VkScreenSurfaceCreateInfoQNX const &rhs) VULKAN_HPP_NOEXCEPT : ScreenSurfaceCreateInfoQNX(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ScreenSurfaceCreateInfoQNX &operator=(ScreenSurfaceCreateInfoQNX const &rhs) VULKAN_HPP_NOEXCEPT = default; ScreenSurfaceCreateInfoQNX &operator=(VkScreenSurfaceCreateInfoQNX const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &setFlags(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &setContext(struct _screen_context *context_) VULKAN_HPP_NOEXCEPT { context = context_; return *this; } VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &setWindow(struct _screen_window *window_) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, context, window); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ScreenSurfaceCreateInfoQNX const &) const = default; # else bool operator==(ScreenSurfaceCreateInfoQNX const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (context == rhs.context) && (window == rhs.window); # endif } bool operator!=(ScreenSurfaceCreateInfoQNX const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eScreenSurfaceCreateInfoQNX; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; struct _screen_context *context = {}; struct _screen_window *window = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX) == sizeof(VkScreenSurfaceCreateInfoQNX), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!"); template<> struct CppType { using Type = ScreenSurfaceCreateInfoQNX; }; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ struct SemaphoreCreateInfo { using NativeType = VkSemaphoreCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(SemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreCreateInfo(VkSemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreCreateInfo &operator=(SemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreCreateInfo &operator=(VkSemaphoreCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreCreateInfo const &) const = default; #else bool operator==(SemaphoreCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(SemaphoreCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo) == sizeof(VkSemaphoreCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SemaphoreCreateInfo; }; struct 
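// Editorial usage sketch (not generated code): the SamplerYcbcrConversionCreateInfo and
// SamplerYcbcrConversionInfo wrappers defined above are typically used together - the first
// creates the conversion object, the second attaches it via pNext to an image view and to the
// matching sampler. Assumes the default `vk` alias and a valid vk::Device `device`.
//
//   vk::SamplerYcbcrConversionCreateInfo conversionInfo{};
//   conversionInfo.setFormat( vk::Format::eG8B8R83Plane420Unorm )
//                 .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
//                 .setYcbcrRange( vk::SamplerYcbcrRange::eItuFull )
//                 .setChromaFilter( vk::Filter::eLinear );
//   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );
//
//   vk::SamplerYcbcrConversionInfo conversionRef{ conversion };
//   vk::ImageViewCreateInfo viewInfo{};  // ImageViewCreateInfo is defined elsewhere in this header
//   viewInfo.setPNext( &conversionRef );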
SemaphoreGetFdInfoKHR { using NativeType = VkSemaphoreGetFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(SemaphoreGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreGetFdInfoKHR(VkSemaphoreGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreGetFdInfoKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreGetFdInfoKHR &operator=(SemaphoreGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreGetFdInfoKHR &operator=(VkSemaphoreGetFdInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, handleType); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreGetFdInfoKHR const &) const = default; #else bool operator==(SemaphoreGetFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (handleType == rhs.handleType); # endif } bool operator!=(SemaphoreGetFdInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR) == sizeof(VkSemaphoreGetFdInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = SemaphoreGetFdInfoKHR; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct SemaphoreGetWin32HandleInfoKHR { using NativeType = VkSemaphoreGetWin32HandleInfoKHR; static 
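// Editorial usage sketch (not generated code): exporting a POSIX fd payload from a semaphore
// with the SemaphoreGetFdInfoKHR wrapper defined above. Assumes the default `vk` alias, a valid
// vk::Device `device`, an exportable vk::Semaphore `semaphore`, VK_KHR_external_semaphore_fd
// being enabled, and the default configuration in which failing calls throw.
//
//   vk::SemaphoreGetFdInfoKHR getFdInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
//   int fd = device.getSemaphoreFdKHR( getFdInfo );  // ownership of the fd passes to the caller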
const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(SemaphoreGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreGetWin32HandleInfoKHR(VkSemaphoreGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreGetWin32HandleInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreGetWin32HandleInfoKHR &operator=(SemaphoreGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreGetWin32HandleInfoKHR &operator=(VkSemaphoreGetWin32HandleInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, handleType); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreGetWin32HandleInfoKHR const &) const = default; # else bool operator==(SemaphoreGetWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (handleType == rhs.handleType); # endif } bool operator!=(SemaphoreGetWin32HandleInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR) == sizeof(VkSemaphoreGetWin32HandleInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = 
SemaphoreGetWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_FUCHSIA) struct SemaphoreGetZirconHandleInfoFUCHSIA { using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), handleType(handleType_) { } VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(SemaphoreGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreGetZirconHandleInfoFUCHSIA(VkSemaphoreGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreGetZirconHandleInfoFUCHSIA(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreGetZirconHandleInfoFUCHSIA &operator=(SemaphoreGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreGetZirconHandleInfoFUCHSIA &operator=(VkSemaphoreGetZirconHandleInfoFUCHSIA const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setHandleType(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, handleType); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreGetZirconHandleInfoFUCHSIA const &) const = default; # else bool operator==(SemaphoreGetZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (handleType == rhs.handleType); # endif } bool operator!=(SemaphoreGetZirconHandleInfoFUCHSIA const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA) == sizeof(VkSemaphoreGetZirconHandleInfoFUCHSIA), "struct and 
wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!"); template<> struct CppType { using Type = SemaphoreGetZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ struct SemaphoreSignalInfo { using NativeType = VkSemaphoreSignalInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), value(value_) { } VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(SemaphoreSignalInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreSignalInfo(VkSemaphoreSignalInfo const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreSignalInfo &operator=(SemaphoreSignalInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreSignalInfo &operator=(VkSemaphoreSignalInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo &setValue(uint64_t value_) VULKAN_HPP_NOEXCEPT { value = value_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, value); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreSignalInfo const &) const = default; #else bool operator==(SemaphoreSignalInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (value == rhs.value); # endif } bool operator!=(SemaphoreSignalInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; uint64_t value = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo) == sizeof(VkSemaphoreSignalInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreSignalInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SemaphoreSignalInfo; }; using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; struct SemaphoreSubmitInfo { using NativeType = VkSemaphoreSubmitInfo; 
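// Editorial usage sketch (not generated code): signalling a timeline semaphore from the host
// with the SemaphoreSignalInfo wrapper defined above (core since Vulkan 1.2, previously
// VK_KHR_timeline_semaphore). Assumes the default `vk` alias, a valid vk::Device `device`
// and a timeline vk::Semaphore `timelineSemaphore`.
//
//   vk::SemaphoreSignalInfo signalInfo{ timelineSemaphore, /*value*/ 42 };
//   device.signalSemaphore( signalInfo );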
static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, uint32_t deviceIndex_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphore(semaphore_), value(value_), stageMask(stageMask_), deviceIndex(deviceIndex_) { } VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo(SemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreSubmitInfo(VkSemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreSubmitInfo &operator=(SemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreSubmitInfo &operator=(VkSemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &setSemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore_) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &setValue(uint64_t value_) VULKAN_HPP_NOEXCEPT { value = value_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &setStageMask(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_) VULKAN_HPP_NOEXCEPT { stageMask = stageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo &setDeviceIndex(uint32_t deviceIndex_) VULKAN_HPP_NOEXCEPT { deviceIndex = deviceIndex_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphore, value, stageMask, deviceIndex); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreSubmitInfo const &) const = default; #else bool operator==(SemaphoreSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphore == rhs.semaphore) && (value == rhs.value) && (stageMask == rhs.stageMask) && (deviceIndex == rhs.deviceIndex); # endif } bool operator!=(SemaphoreSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; uint64_t value = {}; VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {}; uint32_t deviceIndex = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo) == sizeof(VkSemaphoreSubmitInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreSubmitInfo is not nothrow_move_constructible!"); template<> struct 
CppType { using Type = SemaphoreSubmitInfo; }; using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; struct SemaphoreTypeCreateInfo { using NativeType = VkSemaphoreTypeCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), semaphoreType(semaphoreType_), initialValue(initialValue_) { } VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(SemaphoreTypeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreTypeCreateInfo(VkSemaphoreTypeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreTypeCreateInfo(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreTypeCreateInfo &operator=(SemaphoreTypeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreTypeCreateInfo &operator=(VkSemaphoreTypeCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo &setSemaphoreType(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_) VULKAN_HPP_NOEXCEPT { semaphoreType = semaphoreType_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo &setInitialValue(uint64_t initialValue_) VULKAN_HPP_NOEXCEPT { initialValue = initialValue_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, semaphoreType, initialValue); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SemaphoreTypeCreateInfo const &) const = default; #else bool operator==(SemaphoreTypeCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (semaphoreType == rhs.semaphoreType) && (initialValue == rhs.initialValue); # endif } bool operator!=(SemaphoreTypeCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; uint64_t initialValue = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo) == sizeof(VkSemaphoreTypeCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SemaphoreTypeCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SemaphoreTypeCreateInfo; }; using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; struct SemaphoreWaitInfo { using 
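// Editorial usage sketch (not generated code): creating a timeline semaphore by chaining the
// SemaphoreTypeCreateInfo wrapper defined above into a SemaphoreCreateInfo, then describing a
// signal operation with SemaphoreSubmitInfo for use in a SubmitInfo2 (synchronization2).
// Assumes the default `vk` alias and a valid vk::Device `device`.
//
//   vk::SemaphoreTypeCreateInfo typeInfo{ vk::SemaphoreType::eTimeline, /*initialValue*/ 0 };
//   vk::SemaphoreCreateInfo createInfo{};
//   createInfo.setPNext( &typeInfo );
//   vk::Semaphore timelineSemaphore = device.createSemaphore( createInfo );
//
//   vk::SemaphoreSubmitInfo signalSubmit{};
//   signalSubmit.setSemaphore( timelineSemaphore )
//               .setValue( 1 )
//               .setStageMask( vk::PipelineStageFlagBits2::eAllCommands );
//   // signalSubmit would then be referenced from the pSignalSemaphoreInfos of a SubmitInfo2.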
NativeType = VkSemaphoreWaitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore *pSemaphores_ = {}, const uint64_t *pValues_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), semaphoreCount(semaphoreCount_), pSemaphores(pSemaphores_), pValues(pValues_) { } VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(SemaphoreWaitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreWaitInfo(VkSemaphoreWaitInfo const &rhs) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &values_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , semaphoreCount(static_cast(semaphores_.size())) , pSemaphores(semaphores_.data()) , pValues(values_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(semaphores_.size() == values_.size()); # else if(semaphores_.size() != values_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SemaphoreWaitInfo &operator=(SemaphoreWaitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SemaphoreWaitInfo &operator=(VkSemaphoreWaitInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &setFlags(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &setSemaphoreCount(uint32_t semaphoreCount_) VULKAN_HPP_NOEXCEPT { semaphoreCount = semaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &setPSemaphores(const VULKAN_HPP_NAMESPACE::Semaphore *pSemaphores_) VULKAN_HPP_NOEXCEPT { pSemaphores = pSemaphores_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SemaphoreWaitInfo & setSemaphores(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &semaphores_) VULKAN_HPP_NOEXCEPT { semaphoreCount = static_cast(semaphores_.size()); pSemaphores = semaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &setPValues(const uint64_t *pValues_) VULKAN_HPP_NOEXCEPT { pValues = pValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SemaphoreWaitInfo &setValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &values_) VULKAN_HPP_NOEXCEPT { semaphoreCount = static_cast(values_.size()); pValues = values_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # 
if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const uint64_t * const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, semaphoreCount, pSemaphores, pValues); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>(SemaphoreWaitInfo const &) const = default;
#else
bool operator==(SemaphoreWaitInfo const &rhs) const VULKAN_HPP_NOEXCEPT
{
# if defined(VULKAN_HPP_USE_REFLECT)
return this->reflect() == rhs.reflect();
# else
return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (semaphoreCount == rhs.semaphoreCount) && (pSemaphores == rhs.pSemaphores) && (pValues == rhs.pValues);
# endif
}
bool operator!=(SemaphoreWaitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
const void *pNext = {};
VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
uint32_t semaphoreCount = {};
const VULKAN_HPP_NAMESPACE::Semaphore *pSemaphores = {};
const uint64_t *pValues = {};
};
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo) == sizeof(VkSemaphoreWaitInfo), "struct and wrapper have different size!");
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!");
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "SemaphoreWaitInfo is not nothrow_move_constructible!");
template<> struct CppType<StructureType, StructureType::eSemaphoreWaitInfo> { using Type = SemaphoreWaitInfo; };
using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
struct SetStateFlagsIndirectCommandNV {
using NativeType = VkSetStateFlagsIndirectCommandNV;
#if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT : data(data_) {}
VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(SetStateFlagsIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default;
SetStateFlagsIndirectCommandNV(VkSetStateFlagsIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT : SetStateFlagsIndirectCommandNV(*reinterpret_cast<SetStateFlagsIndirectCommandNV const *>(&rhs)) { }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
SetStateFlagsIndirectCommandNV &operator=(SetStateFlagsIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT = default;
SetStateFlagsIndirectCommandNV &operator=(VkSetStateFlagsIndirectCommandNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>(&rhs); return *this; }
#if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV &setData(uint32_t data_) VULKAN_HPP_NOEXCEPT { data = data_; return *this; }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
explicit operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV *>(this); }
explicit operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>(this); }
#if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
auto
# else
std::tuple<uint32_t const &>
# endif
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(data); }
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>(SetStateFlagsIndirectCommandNV const &) const = default;
#else
bool operator==(SetStateFlagsIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT
{
# if defined(VULKAN_HPP_USE_REFLECT)
return this->reflect() == rhs.reflect();
# else
return (data == rhs.data);
# endif
}
bool operator!=(SetStateFlagsIndirectCommandNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); }
#endif
public:
uint32_t data = {};
};
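// Editorial usage sketch (not generated code): blocking on a timeline value with the
// SemaphoreWaitInfo wrapper defined above. Assumes the default `vk` alias, a valid vk::Device
// `device`, a timeline vk::Semaphore `timelineSemaphore`, and <array> / <cstdint> being included.
//
//   std::array<vk::Semaphore, 1> semaphores = { timelineSemaphore };
//   std::array<uint64_t, 1>      values     = { 1 };
//   vk::SemaphoreWaitInfo waitInfo{};
//   waitInfo.setSemaphores( semaphores ).setValues( values );  // enhanced-mode array setters shown above
//   vk::Result result = device.waitSemaphores( waitInfo, /*timeout in ns*/ UINT64_MAX );
//   // result is vk::Result::eSuccess or vk::Result::eTimeout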
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV) == sizeof(VkSetStateFlagsIndirectCommandNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!"); struct ShaderModuleCreateInfo { using NativeType = VkShaderModuleCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t *pCode_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), codeSize(codeSize_), pCode(pCode_) { } VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(ShaderModuleCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderModuleCreateInfo(VkShaderModuleCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT : ShaderModuleCreateInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &code_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , codeSize(code_.size() * 4) , pCode(code_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ShaderModuleCreateInfo &operator=(ShaderModuleCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderModuleCreateInfo &operator=(VkShaderModuleCreateInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &setFlags(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &setCodeSize(size_t codeSize_) VULKAN_HPP_NOEXCEPT { codeSize = codeSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &setPCode(const uint32_t *pCode_) VULKAN_HPP_NOEXCEPT { pCode = pCode_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ShaderModuleCreateInfo &setCode(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &code_) VULKAN_HPP_NOEXCEPT { codeSize = code_.size() * 4; pCode = code_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, codeSize, pCode); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ShaderModuleCreateInfo const &) const = default; #else bool operator==(ShaderModuleCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && 
(codeSize == rhs.codeSize) && (pCode == rhs.pCode); # endif } bool operator!=(ShaderModuleCreateInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; size_t codeSize = {}; const uint32_t *pCode = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo) == sizeof(VkShaderModuleCreateInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ShaderModuleCreateInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = ShaderModuleCreateInfo; }; struct ShaderModuleValidationCacheCreateInfoEXT { using NativeType = VkShaderModuleValidationCacheCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), validationCache(validationCache_) { } VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(ShaderModuleValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderModuleValidationCacheCreateInfoEXT(VkShaderModuleValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ShaderModuleValidationCacheCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ShaderModuleValidationCacheCreateInfoEXT &operator=(ShaderModuleValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderModuleValidationCacheCreateInfoEXT &operator=(VkShaderModuleValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setValidationCache(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_) VULKAN_HPP_NOEXCEPT { validationCache = validationCache_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, validationCache); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ShaderModuleValidationCacheCreateInfoEXT const &) const = default; #else bool operator==(ShaderModuleValidationCacheCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (validationCache == rhs.validationCache); # endif } bool operator!=(ShaderModuleValidationCacheCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return 
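// Editorial usage sketch (not generated code): creating a shader module from a SPIR-V blob with
// the ShaderModuleCreateInfo wrapper defined above. Assumes the default `vk` alias, a valid
// vk::Device `device`, and a hypothetical helper loadSpirv() returning std::vector<uint32_t>.
//
//   std::vector<uint32_t> spirv = loadSpirv( "triangle.vert.spv" );
//   vk::ShaderModuleCreateInfo moduleInfo{};
//   moduleInfo.setCode( spirv );  // enhanced mode: codeSize becomes spirv.size() * 4 bytes
//   // equivalently: moduleInfo.setCodeSize( spirv.size() * sizeof( uint32_t ) ).setPCode( spirv.data() );
//   vk::ShaderModule shaderModule = device.createShaderModule( moduleInfo );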
!operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT) == sizeof(VkShaderModuleValidationCacheCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ShaderModuleValidationCacheCreateInfoEXT; }; struct ShaderResourceUsageAMD { using NativeType = VkShaderResourceUsageAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT : numUsedVgprs(numUsedVgprs_), numUsedSgprs(numUsedSgprs_), ldsSizePerLocalWorkGroup(ldsSizePerLocalWorkGroup_), ldsUsageSizeInBytes(ldsUsageSizeInBytes_), scratchMemUsageInBytes(scratchMemUsageInBytes_) { } VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(ShaderResourceUsageAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderResourceUsageAMD(VkShaderResourceUsageAMD const &rhs) VULKAN_HPP_NOEXCEPT : ShaderResourceUsageAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ShaderResourceUsageAMD &operator=(ShaderResourceUsageAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderResourceUsageAMD &operator=(VkShaderResourceUsageAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ShaderResourceUsageAMD const &) const = default; #else bool operator==(ShaderResourceUsageAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (numUsedVgprs == rhs.numUsedVgprs) && (numUsedSgprs == rhs.numUsedSgprs) && (ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup) && (ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes) && (scratchMemUsageInBytes == rhs.scratchMemUsageInBytes); # endif } bool operator!=(ShaderResourceUsageAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t numUsedVgprs = {}; uint32_t numUsedSgprs = {}; uint32_t ldsSizePerLocalWorkGroup = {}; size_t ldsUsageSizeInBytes = {}; size_t scratchMemUsageInBytes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD) == sizeof(VkShaderResourceUsageAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ShaderResourceUsageAMD is not 
nothrow_move_constructible!"); struct ShaderStatisticsInfoAMD { using NativeType = VkShaderStatisticsInfoAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array const &computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT : shaderStageMask(shaderStageMask_), resourceUsage(resourceUsage_), numPhysicalVgprs(numPhysicalVgprs_), numPhysicalSgprs(numPhysicalSgprs_), numAvailableVgprs(numAvailableVgprs_), numAvailableSgprs(numAvailableSgprs_), computeWorkGroupSize(computeWorkGroupSize_) { } VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(ShaderStatisticsInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderStatisticsInfoAMD(VkShaderStatisticsInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT : ShaderStatisticsInfoAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ShaderStatisticsInfoAMD &operator=(ShaderStatisticsInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; ShaderStatisticsInfoAMD &operator=(VkShaderStatisticsInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ShaderStatisticsInfoAMD const &) const = default; #else bool operator==(ShaderStatisticsInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (shaderStageMask == rhs.shaderStageMask) && (resourceUsage == rhs.resourceUsage) && (numPhysicalVgprs == rhs.numPhysicalVgprs) && (numPhysicalSgprs == rhs.numPhysicalSgprs) && (numAvailableVgprs == rhs.numAvailableVgprs) && (numAvailableSgprs == rhs.numAvailableSgprs) && (computeWorkGroupSize == rhs.computeWorkGroupSize); # endif } bool operator!=(ShaderStatisticsInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; uint32_t numPhysicalVgprs = {}; uint32_t numPhysicalSgprs = {}; uint32_t numAvailableVgprs = {}; uint32_t numAvailableSgprs = {}; VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD) == sizeof(VkShaderStatisticsInfoAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!"); struct SharedPresentSurfaceCapabilitiesKHR { using NativeType = VkSharedPresentSurfaceCapabilitiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eSharedPresentSurfaceCapabilitiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), sharedPresentSupportedUsageFlags(sharedPresentSupportedUsageFlags_) { } VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(SharedPresentSurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SharedPresentSurfaceCapabilitiesKHR(VkSharedPresentSurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : SharedPresentSurfaceCapabilitiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SharedPresentSurfaceCapabilitiesKHR &operator=(SharedPresentSurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SharedPresentSurfaceCapabilitiesKHR &operator=(VkSharedPresentSurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, sharedPresentSupportedUsageFlags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SharedPresentSurfaceCapabilitiesKHR const &) const = default; #else bool operator==(SharedPresentSurfaceCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags); # endif } bool operator!=(SharedPresentSurfaceCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR) == sizeof(VkSharedPresentSurfaceCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = SharedPresentSurfaceCapabilitiesKHR; }; struct SparseImageFormatProperties { using NativeType = VkSparseImageFormatProperties; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT : aspectMask(aspectMask_), imageGranularity(imageGranularity_), flags(flags_) { } VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(SparseImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageFormatProperties(VkSparseImageFormatProperties const &rhs) VULKAN_HPP_NOEXCEPT : SparseImageFormatProperties(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ 
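    // Usage sketch for the constructor guarded above: a minimal example of filling this
    // wrapper struct directly. Assumptions (not part of the generated code): the guard
    // VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is left undefined, the default "vk" namespace
    // alias is in use, and 'props' is a hypothetical local variable.
    //
    //   vk::SparseImageFormatProperties props( vk::ImageAspectFlagBits::eColor,
    //                                          vk::Extent3D( 128, 128, 1 ),
    //                                          vk::SparseImageFormatFlagBits::eSingleMiptail );
    //
    // With VULKAN_HPP_NO_STRUCT_CONSTRUCTORS defined, the struct behaves as a plain
    // aggregate and would instead be filled member by member.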
    SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
      return *this;
    }

    explicit operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageFormatProperties *>( this );
    }

    explicit operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( aspectMask, imageGranularity, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageFormatProperties const & ) const = default;
#else
    bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags );
#  endif
    }

    bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags       aspectMask       = {};
    VULKAN_HPP_NAMESPACE::Extent3D               imageGranularity = {};
    VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags            = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
                            "SparseImageFormatProperties is not nothrow_move_constructible!" );

  struct SparseImageFormatProperties2
  {
    using NativeType = VkSparseImageFormatProperties2;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {},
                                                       void *                                            pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , properties( properties_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
      return *this;
    }

    explicit operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
    }

    explicit operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, properties );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
#else
    bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (properties == rhs.properties); # endif } bool operator!=(SparseImageFormatProperties2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; void *pNext = {}; VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2) == sizeof(VkSparseImageFormatProperties2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SparseImageFormatProperties2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = SparseImageFormatProperties2; }; using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; struct SparseImageMemoryRequirements { using NativeType = VkSparseImageMemoryRequirements; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT : formatProperties(formatProperties_), imageMipTailFirstLod(imageMipTailFirstLod_), imageMipTailSize(imageMipTailSize_), imageMipTailOffset(imageMipTailOffset_), imageMipTailStride(imageMipTailStride_) { } VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(SparseImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryRequirements(VkSparseImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT : SparseImageMemoryRequirements(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageMemoryRequirements &operator=(SparseImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryRequirements &operator=(VkSparseImageMemoryRequirements const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SparseImageMemoryRequirements const &) const = default; #else bool operator==(SparseImageMemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (formatProperties == rhs.formatProperties) && (imageMipTailFirstLod == rhs.imageMipTailFirstLod) && (imageMipTailSize == rhs.imageMipTailSize) && (imageMipTailOffset == rhs.imageMipTailOffset) && (imageMipTailStride == rhs.imageMipTailStride); # endif } bool operator!=(SparseImageMemoryRequirements const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: 
    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties     = {};
    uint32_t                                          imageMipTailFirstLod = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailSize     = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailOffset   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailStride   = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
                            "SparseImageMemoryRequirements is not nothrow_move_constructible!" );

  struct SparseImageMemoryRequirements2
  {
    using NativeType = VkSparseImageMemoryRequirements2;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {},
                                                         void *                                              pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , memoryRequirements( memoryRequirements_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
    {
    }
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
      return *this;
    }

    explicit operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryRequirements2 *>( this );
    }

    explicit operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryRequirements2 *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memoryRequirements );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
#else
    bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
#  endif
    }

    bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType              = StructureType::eSparseImageMemoryRequirements2;
    void *                                              pNext              = {};
    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
  };

  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
                            "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
  {
    using Type = SparseImageMemoryRequirements2;
  };

  using SparseImageMemoryRequirements2KHR =
SparseImageMemoryRequirements2; #if defined(VK_USE_PLATFORM_GGP) struct StreamDescriptorSurfaceCreateInfoGGP { using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), streamDescriptor(streamDescriptor_) { } VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(StreamDescriptorSurfaceCreateInfoGGP const &rhs) VULKAN_HPP_NOEXCEPT = default; StreamDescriptorSurfaceCreateInfoGGP(VkStreamDescriptorSurfaceCreateInfoGGP const &rhs) VULKAN_HPP_NOEXCEPT : StreamDescriptorSurfaceCreateInfoGGP(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ StreamDescriptorSurfaceCreateInfoGGP &operator=(StreamDescriptorSurfaceCreateInfoGGP const &rhs) VULKAN_HPP_NOEXCEPT = default; StreamDescriptorSurfaceCreateInfoGGP &operator=(VkStreamDescriptorSurfaceCreateInfoGGP const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setFlags(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP &setStreamDescriptor(GgpStreamDescriptor streamDescriptor_) VULKAN_HPP_NOEXCEPT { streamDescriptor = streamDescriptor_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, streamDescriptor); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(StreamDescriptorSurfaceCreateInfoGGP const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = memcmp(&streamDescriptor, &rhs.streamDescriptor, sizeof(GgpStreamDescriptor)); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(StreamDescriptorSurfaceCreateInfoGGP const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (memcmp(&streamDescriptor, &rhs.streamDescriptor, sizeof(GgpStreamDescriptor)) == 0); } bool operator!=(StreamDescriptorSurfaceCreateInfoGGP const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; const void *pNext = {}; VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; GgpStreamDescriptor streamDescriptor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP) == sizeof(VkStreamDescriptorSurfaceCreateInfoGGP), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!"); template<> struct CppType { using Type = StreamDescriptorSurfaceCreateInfoGGP; }; #endif /*VK_USE_PLATFORM_GGP*/ struct StridedDeviceAddressRegionKHR { using NativeType = VkStridedDeviceAddressRegionKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT : deviceAddress(deviceAddress_), stride(stride_), size(size_) { } VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(StridedDeviceAddressRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; StridedDeviceAddressRegionKHR(VkStridedDeviceAddressRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT : StridedDeviceAddressRegionKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ StridedDeviceAddressRegionKHR &operator=(StridedDeviceAddressRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; StridedDeviceAddressRegionKHR &operator=(VkStridedDeviceAddressRegionKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &setDeviceAddress(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &setStride(VULKAN_HPP_NAMESPACE::DeviceSize stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &setSize(VULKAN_HPP_NAMESPACE::DeviceSize size_) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(deviceAddress, stride, size); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(StridedDeviceAddressRegionKHR const &) const = default; #else bool operator==(StridedDeviceAddressRegionKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if 
defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (deviceAddress == rhs.deviceAddress) && (stride == rhs.stride) && (size == rhs.size); # endif } bool operator!=(StridedDeviceAddressRegionKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR) == sizeof(VkStridedDeviceAddressRegionKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!"); struct SubmitInfo { using NativeType = VkSubmitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags *pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore *pSignalSemaphores_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), waitSemaphoreCount(waitSemaphoreCount_), pWaitSemaphores(pWaitSemaphores_), pWaitDstStageMask(pWaitDstStageMask_), commandBufferCount(commandBufferCount_), pCommandBuffers(pCommandBuffers_), signalSemaphoreCount(signalSemaphoreCount_), pSignalSemaphores(pSignalSemaphores_) { } VULKAN_HPP_CONSTEXPR SubmitInfo(SubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SubmitInfo(VkSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT : SubmitInfo(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphores_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , waitSemaphoreCount(static_cast(waitSemaphores_.size())) , pWaitSemaphores(waitSemaphores_.data()) , pWaitDstStageMask(waitDstStageMask_.data()) , commandBufferCount(static_cast(commandBuffers_.size())) , pCommandBuffers(commandBuffers_.data()) , signalSemaphoreCount(static_cast(signalSemaphores_.size())) , pSignalSemaphores(signalSemaphores_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(waitSemaphores_.size() == waitDstStageMask_.size()); # else if(waitSemaphores_.size() != waitDstStageMask_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubmitInfo &operator=(SubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SubmitInfo &operator=(VkSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { 
pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setWaitSemaphoreCount(uint32_t waitSemaphoreCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setPWaitSemaphores(const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores_) VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo & setWaitSemaphores(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphores_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast(waitSemaphores_.size()); pWaitSemaphores = waitSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setPWaitDstStageMask(const VULKAN_HPP_NAMESPACE::PipelineStageFlags *pWaitDstStageMask_) VULKAN_HPP_NOEXCEPT { pWaitDstStageMask = pWaitDstStageMask_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo &setWaitDstStageMask(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitDstStageMask_) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast(waitDstStageMask_.size()); pWaitDstStageMask = waitDstStageMask_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setCommandBufferCount(uint32_t commandBufferCount_) VULKAN_HPP_NOEXCEPT { commandBufferCount = commandBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setPCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers_) VULKAN_HPP_NOEXCEPT { pCommandBuffers = pCommandBuffers_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo & setCommandBuffers(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &commandBuffers_) VULKAN_HPP_NOEXCEPT { commandBufferCount = static_cast(commandBuffers_.size()); pCommandBuffers = commandBuffers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setSignalSemaphoreCount(uint32_t signalSemaphoreCount_) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo &setPSignalSemaphores(const VULKAN_HPP_NAMESPACE::Semaphore *pSignalSemaphores_) VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo & setSignalSemaphores(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphores_) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = static_cast(signalSemaphores_.size()); pSignalSemaphores = signalSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubmitInfo const &) const = default; #else bool operator==(SubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (waitSemaphoreCount == 
rhs.waitSemaphoreCount) && (pWaitSemaphores == rhs.pWaitSemaphores) && (pWaitDstStageMask == rhs.pWaitDstStageMask) && (commandBufferCount == rhs.commandBufferCount) && (pCommandBuffers == rhs.pCommandBuffers) && (signalSemaphoreCount == rhs.signalSemaphoreCount) && (pSignalSemaphores == rhs.pSignalSemaphores); # endif } bool operator!=(SubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; const void *pNext = {}; uint32_t waitSemaphoreCount = {}; const VULKAN_HPP_NAMESPACE::Semaphore *pWaitSemaphores = {}; const VULKAN_HPP_NAMESPACE::PipelineStageFlags *pWaitDstStageMask = {}; uint32_t commandBufferCount = {}; const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers = {}; uint32_t signalSemaphoreCount = {}; const VULKAN_HPP_NAMESPACE::Semaphore *pSignalSemaphores = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubmitInfo) == sizeof(VkSubmitInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubmitInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubmitInfo; }; struct SubmitInfo2 { using NativeType = VkSubmitInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubmitInfo2(VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, uint32_t waitSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *pWaitSemaphoreInfos_ = {}, uint32_t commandBufferInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo *pCommandBufferInfos_ = {}, uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *pSignalSemaphoreInfos_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), waitSemaphoreInfoCount(waitSemaphoreInfoCount_), pWaitSemaphoreInfos(pWaitSemaphoreInfos_), commandBufferInfoCount(commandBufferInfoCount_), pCommandBufferInfos(pCommandBufferInfos_), signalSemaphoreInfoCount(signalSemaphoreInfoCount_), pSignalSemaphoreInfos(pSignalSemaphoreInfos_) { } VULKAN_HPP_CONSTEXPR SubmitInfo2(SubmitInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; SubmitInfo2(VkSubmitInfo2 const &rhs) VULKAN_HPP_NOEXCEPT : SubmitInfo2(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo2(VULKAN_HPP_NAMESPACE::SubmitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &commandBufferInfos_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreInfos_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , waitSemaphoreInfoCount(static_cast(waitSemaphoreInfos_.size())) , pWaitSemaphoreInfos(waitSemaphoreInfos_.data()) , commandBufferInfoCount(static_cast(commandBufferInfos_.size())) , pCommandBufferInfos(commandBufferInfos_.data()) , signalSemaphoreInfoCount(static_cast(signalSemaphoreInfos_.size())) , pSignalSemaphoreInfos(signalSemaphoreInfos_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubmitInfo2 &operator=(SubmitInfo2 const &rhs) VULKAN_HPP_NOEXCEPT = default; SubmitInfo2 &operator=(VkSubmitInfo2 const &rhs) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &setFlags(VULKAN_HPP_NAMESPACE::SubmitFlags flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &setWaitSemaphoreInfoCount(uint32_t waitSemaphoreInfoCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreInfoCount = waitSemaphoreInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &setPWaitSemaphoreInfos(const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *pWaitSemaphoreInfos_) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreInfos = pWaitSemaphoreInfos_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo2 &setWaitSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreInfos_) VULKAN_HPP_NOEXCEPT { waitSemaphoreInfoCount = static_cast(waitSemaphoreInfos_.size()); pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &setCommandBufferInfoCount(uint32_t commandBufferInfoCount_) VULKAN_HPP_NOEXCEPT { commandBufferInfoCount = commandBufferInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPCommandBufferInfos(const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo *pCommandBufferInfos_) VULKAN_HPP_NOEXCEPT { pCommandBufferInfos = pCommandBufferInfos_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo2 &setCommandBufferInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &commandBufferInfos_) VULKAN_HPP_NOEXCEPT { commandBufferInfoCount = static_cast(commandBufferInfos_.size()); pCommandBufferInfos = commandBufferInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 &setSignalSemaphoreInfoCount(uint32_t signalSemaphoreInfoCount_) VULKAN_HPP_NOEXCEPT { signalSemaphoreInfoCount = signalSemaphoreInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPSignalSemaphoreInfos(const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *pSignalSemaphoreInfos_) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreInfos = pSignalSemaphoreInfos_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubmitInfo2 &setSignalSemaphoreInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreInfos_) VULKAN_HPP_NOEXCEPT { signalSemaphoreInfoCount = static_cast(signalSemaphoreInfos_.size()); pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, waitSemaphoreInfoCount, pWaitSemaphoreInfos, commandBufferInfoCount, pCommandBufferInfos, signalSemaphoreInfoCount, pSignalSemaphoreInfos); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubmitInfo2 const &) const = default; #else bool operator==(SubmitInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && 
(waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount) && (pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos) && (commandBufferInfoCount == rhs.commandBufferInfoCount) && (pCommandBufferInfos == rhs.pCommandBufferInfos) && (signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount) && (pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos); # endif } bool operator!=(SubmitInfo2 const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SubmitFlags flags = {}; uint32_t waitSemaphoreInfoCount = {}; const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *pWaitSemaphoreInfos = {}; uint32_t commandBufferInfoCount = {}; const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo *pCommandBufferInfos = {}; uint32_t signalSemaphoreInfoCount = {}; const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo *pSignalSemaphoreInfos = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubmitInfo2) == sizeof(VkSubmitInfo2), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubmitInfo2 is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubmitInfo2; }; using SubmitInfo2KHR = SubmitInfo2; struct SubpassBeginInfo { using NativeType = VkSubpassBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), contents(contents_) { } VULKAN_HPP_CONSTEXPR SubpassBeginInfo(SubpassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassBeginInfo(VkSubpassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassBeginInfo &operator=(SubpassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassBeginInfo &operator=(VkSubpassBeginInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo &setContents(VULKAN_HPP_NAMESPACE::SubpassContents contents_) VULKAN_HPP_NOEXCEPT { contents = contents_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, contents); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassBeginInfo const &) const = default; #else bool operator==(SubpassBeginInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (contents == rhs.contents); # endif } bool operator!=(SubpassBeginInfo const &rhs) const 
VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassBeginInfo) == sizeof(VkSubpassBeginInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassBeginInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassBeginInfo; }; using SubpassBeginInfoKHR = SubpassBeginInfo; struct SubpassDescriptionDepthStencilResolve { using NativeType = VkSubpassDescriptionDepthStencilResolve; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilResolveAttachment_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), depthResolveMode(depthResolveMode_), stencilResolveMode(stencilResolveMode_), pDepthStencilResolveAttachment(pDepthStencilResolveAttachment_) { } VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(SubpassDescriptionDepthStencilResolve const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDescriptionDepthStencilResolve(VkSubpassDescriptionDepthStencilResolve const &rhs) VULKAN_HPP_NOEXCEPT : SubpassDescriptionDepthStencilResolve(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassDescriptionDepthStencilResolve &operator=(SubpassDescriptionDepthStencilResolve const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassDescriptionDepthStencilResolve &operator=(VkSubpassDescriptionDepthStencilResolve const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setDepthResolveMode(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_) VULKAN_HPP_NOEXCEPT { depthResolveMode = depthResolveMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setStencilResolveMode(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_) VULKAN_HPP_NOEXCEPT { stencilResolveMode = stencilResolveMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment(const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilResolveAttachment_) VULKAN_HPP_NOEXCEPT { pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if 
defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassDescriptionDepthStencilResolve const &) const = default; #else bool operator==(SubpassDescriptionDepthStencilResolve const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (depthResolveMode == rhs.depthResolveMode) && (stencilResolveMode == rhs.stencilResolveMode) && (pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment); # endif } bool operator!=(SubpassDescriptionDepthStencilResolve const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; const VULKAN_HPP_NAMESPACE::AttachmentReference2 *pDepthStencilResolveAttachment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve) == sizeof(VkSubpassDescriptionDepthStencilResolve), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassDescriptionDepthStencilResolve; }; using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; struct SubpassEndInfo { using NativeType = VkSubpassEndInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassEndInfo(const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_) {} VULKAN_HPP_CONSTEXPR SubpassEndInfo(SubpassEndInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassEndInfo(VkSubpassEndInfo const &rhs) VULKAN_HPP_NOEXCEPT : SubpassEndInfo(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassEndInfo &operator=(SubpassEndInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassEndInfo &operator=(VkSubpassEndInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassEndInfo const &) const = default; #else bool operator==(SubpassEndInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # 
if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext); # endif } bool operator!=(SubpassEndInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; const void *pNext = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassEndInfo) == sizeof(VkSubpassEndInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassEndInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassEndInfo; }; using SubpassEndInfoKHR = SubpassEndInfo; struct SubpassFragmentDensityMapOffsetEndInfoQCOM { using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM(uint32_t fragmentDensityOffsetCount_ = {}, const VULKAN_HPP_NAMESPACE::Offset2D *pFragmentDensityOffsets_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fragmentDensityOffsetCount(fragmentDensityOffsetCount_), pFragmentDensityOffsets(pFragmentDensityOffsets_) { } VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM(SubpassFragmentDensityMapOffsetEndInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassFragmentDensityMapOffsetEndInfoQCOM(VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT : SubpassFragmentDensityMapOffsetEndInfoQCOM(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassFragmentDensityMapOffsetEndInfoQCOM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &fragmentDensityOffsets_, const void *pNext_ = nullptr) : pNext(pNext_) , fragmentDensityOffsetCount(static_cast(fragmentDensityOffsets_.size())) , pFragmentDensityOffsets(fragmentDensityOffsets_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassFragmentDensityMapOffsetEndInfoQCOM &operator=(SubpassFragmentDensityMapOffsetEndInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassFragmentDensityMapOffsetEndInfoQCOM &operator=(VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsetCount(uint32_t fragmentDensityOffsetCount_) VULKAN_HPP_NOEXCEPT { fragmentDensityOffsetCount = fragmentDensityOffsetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPFragmentDensityOffsets(const VULKAN_HPP_NAMESPACE::Offset2D *pFragmentDensityOffsets_) VULKAN_HPP_NOEXCEPT { pFragmentDensityOffsets = pFragmentDensityOffsets_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SubpassFragmentDensityMapOffsetEndInfoQCOM &setFragmentDensityOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &fragmentDensityOffsets_) VULKAN_HPP_NOEXCEPT { fragmentDensityOffsetCount = 
static_cast(fragmentDensityOffsets_.size()); pFragmentDensityOffsets = fragmentDensityOffsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassFragmentDensityMapOffsetEndInfoQCOM const &) const = default; #else bool operator==(SubpassFragmentDensityMapOffsetEndInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount) && (pFragmentDensityOffsets == rhs.pFragmentDensityOffsets); # endif } bool operator!=(SubpassFragmentDensityMapOffsetEndInfoQCOM const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; const void *pNext = {}; uint32_t fragmentDensityOffsetCount = {}; const VULKAN_HPP_NAMESPACE::Offset2D *pFragmentDensityOffsets = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM) == sizeof(VkSubpassFragmentDensityMapOffsetEndInfoQCOM), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM; }; struct SubpassShadingPipelineCreateInfoHUAWEI { using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), renderPass(renderPass_), subpass(subpass_) { } VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI(SubpassShadingPipelineCreateInfoHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassShadingPipelineCreateInfoHUAWEI(VkSubpassShadingPipelineCreateInfoHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT : SubpassShadingPipelineCreateInfoHUAWEI(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SubpassShadingPipelineCreateInfoHUAWEI &operator=(SubpassShadingPipelineCreateInfoHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT = default; SubpassShadingPipelineCreateInfoHUAWEI &operator=(VkSubpassShadingPipelineCreateInfoHUAWEI const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator 
VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, renderPass, subpass); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SubpassShadingPipelineCreateInfoHUAWEI const &) const = default; #else bool operator==(SubpassShadingPipelineCreateInfoHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (renderPass == rhs.renderPass) && (subpass == rhs.subpass); # endif } bool operator!=(SubpassShadingPipelineCreateInfoHUAWEI const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; void *pNext = {}; VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; uint32_t subpass = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI) == sizeof(VkSubpassShadingPipelineCreateInfoHUAWEI), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!"); template<> struct CppType { using Type = SubpassShadingPipelineCreateInfoHUAWEI; }; struct SurfaceCapabilities2EXT { using NativeType = VkSurfaceCapabilities2EXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), minImageCount(minImageCount_), maxImageCount(maxImageCount_), currentExtent(currentExtent_), minImageExtent(minImageExtent_), maxImageExtent(maxImageExtent_), maxImageArrayLayers(maxImageArrayLayers_), supportedTransforms(supportedTransforms_), currentTransform(currentTransform_), supportedCompositeAlpha(supportedCompositeAlpha_), supportedUsageFlags(supportedUsageFlags_), supportedSurfaceCounters(supportedSurfaceCounters_) { } VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(SurfaceCapabilities2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilities2EXT(VkSurfaceCapabilities2EXT const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceCapabilities2EXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceCapabilities2EXT &operator=(SurfaceCapabilities2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilities2EXT &operator=(VkSurfaceCapabilities2EXT const 
&rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags, supportedSurfaceCounters); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceCapabilities2EXT const &) const = default; #else bool operator==(SurfaceCapabilities2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (minImageCount == rhs.minImageCount) && (maxImageCount == rhs.maxImageCount) && (currentExtent == rhs.currentExtent) && (minImageExtent == rhs.minImageExtent) && (maxImageExtent == rhs.maxImageExtent) && (maxImageArrayLayers == rhs.maxImageArrayLayers) && (supportedTransforms == rhs.supportedTransforms) && (currentTransform == rhs.currentTransform) && (supportedCompositeAlpha == rhs.supportedCompositeAlpha) && (supportedUsageFlags == rhs.supportedUsageFlags) && (supportedSurfaceCounters == rhs.supportedSurfaceCounters); # endif } bool operator!=(SurfaceCapabilities2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; void *pNext = {}; uint32_t minImageCount = {}; uint32_t maxImageCount = {}; VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; uint32_t maxImageArrayLayers = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT) == sizeof(VkSurfaceCapabilities2EXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceCapabilities2EXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceCapabilities2EXT; }; struct SurfaceCapabilitiesKHR { using NativeType = VkSurfaceCapabilitiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, 
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT : minImageCount(minImageCount_), maxImageCount(maxImageCount_), currentExtent(currentExtent_), minImageExtent(minImageExtent_), maxImageExtent(maxImageExtent_), maxImageArrayLayers(maxImageArrayLayers_), supportedTransforms(supportedTransforms_), currentTransform(currentTransform_), supportedCompositeAlpha(supportedCompositeAlpha_), supportedUsageFlags(supportedUsageFlags_) { } VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(SurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilitiesKHR(VkSurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceCapabilitiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceCapabilitiesKHR &operator=(SurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilitiesKHR &operator=(VkSurfaceCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(minImageCount, maxImageCount, currentExtent, minImageExtent, maxImageExtent, maxImageArrayLayers, supportedTransforms, currentTransform, supportedCompositeAlpha, supportedUsageFlags); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceCapabilitiesKHR const &) const = default; #else bool operator==(SurfaceCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (minImageCount == rhs.minImageCount) && (maxImageCount == rhs.maxImageCount) && (currentExtent == rhs.currentExtent) && (minImageExtent == rhs.minImageExtent) && (maxImageExtent == rhs.maxImageExtent) && (maxImageArrayLayers == rhs.maxImageArrayLayers) && (supportedTransforms == rhs.supportedTransforms) && (currentTransform == rhs.currentTransform) && (supportedCompositeAlpha == rhs.supportedCompositeAlpha) && (supportedUsageFlags == rhs.supportedUsageFlags); # endif } bool operator!=(SurfaceCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t minImageCount = {}; uint32_t maxImageCount = {}; VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; uint32_t maxImageArrayLayers = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR) == sizeof(VkSurfaceCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceCapabilitiesKHR is not nothrow_move_constructible!"); struct SurfaceCapabilities2KHR { using 
NativeType = VkSurfaceCapabilities2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), surfaceCapabilities(surfaceCapabilities_) { } VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(SurfaceCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilities2KHR(VkSurfaceCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceCapabilities2KHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceCapabilities2KHR &operator=(SurfaceCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilities2KHR &operator=(VkSurfaceCapabilities2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, surfaceCapabilities); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceCapabilities2KHR const &) const = default; #else bool operator==(SurfaceCapabilities2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (surfaceCapabilities == rhs.surfaceCapabilities); # endif } bool operator!=(SurfaceCapabilities2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR) == sizeof(VkSurfaceCapabilities2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceCapabilities2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceCapabilities2KHR; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct SurfaceCapabilitiesFullScreenExclusiveEXT { using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fullScreenExclusiveSupported(fullScreenExclusiveSupported_) { } VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(SurfaceCapabilitiesFullScreenExclusiveEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilitiesFullScreenExclusiveEXT(VkSurfaceCapabilitiesFullScreenExclusiveEXT const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceCapabilitiesFullScreenExclusiveEXT(*reinterpret_cast(&rhs)) { } # endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceCapabilitiesFullScreenExclusiveEXT &operator=(SurfaceCapabilitiesFullScreenExclusiveEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceCapabilitiesFullScreenExclusiveEXT &operator=(VkSurfaceCapabilitiesFullScreenExclusiveEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_) VULKAN_HPP_NOEXCEPT { fullScreenExclusiveSupported = fullScreenExclusiveSupported_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fullScreenExclusiveSupported); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceCapabilitiesFullScreenExclusiveEXT const &) const = default; # else bool operator==(SurfaceCapabilitiesFullScreenExclusiveEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported); # endif } bool operator!=(SurfaceCapabilitiesFullScreenExclusiveEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT) == sizeof(VkSurfaceCapabilitiesFullScreenExclusiveEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct SurfaceFormatKHR { using NativeType = VkSurfaceFormatKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT : format(format_), colorSpace(colorSpace_) { } VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(SurfaceFormatKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceFormatKHR(VkSurfaceFormatKHR const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR(*reinterpret_cast(&rhs)) {} #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceFormatKHR &operator=(SurfaceFormatKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceFormatKHR &operator=(VkSurfaceFormatKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } 
explicit operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
    }

    explicit operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( format, colorSpace );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormatKHR const & ) const = default;
#else
    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
# endif
    }

    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Format        format     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value,
                            "SurfaceFormatKHR is not nothrow_move_constructible!" );

  struct SurfaceFormat2KHR
  {
    using NativeType = VkSurfaceFormat2KHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , surfaceFormat( surfaceFormat_ )
    {
    }

    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
      return *this;
    }

    explicit operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
    }

    explicit operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, surfaceFormat );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
#else
    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
# endif
    }

    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType         = StructureType::eSurfaceFormat2KHR;
    void *                                 pNext         = {};
    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ),
                            "struct and wrapper have different size!" );
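  // Usage sketch (not part of the generated header): picking a swapchain format with SurfaceFormatKHR.
  // Assumes the default `vk` namespace and enhanced mode with exceptions, and that `physicalDevice`
  // and `surface` are valid handles created elsewhere with VK_KHR_surface enabled.
  //
  //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  //   vk::SurfaceFormatKHR chosen = formats.front();  // fall back to the first reported pair
  //   for ( vk::SurfaceFormatKHR const & f : formats )
  //   {
  //     if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
  //     {
  //       chosen = f;  // prefer an sRGB format / color-space pair when available
  //       break;
  //     }
  //   }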
VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceFormat2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceFormat2KHR; }; #if defined(VK_USE_PLATFORM_WIN32_KHR) struct SurfaceFullScreenExclusiveInfoEXT { using NativeType = VkSurfaceFullScreenExclusiveInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), fullScreenExclusive(fullScreenExclusive_) { } VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(SurfaceFullScreenExclusiveInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveInfoEXT(VkSurfaceFullScreenExclusiveInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceFullScreenExclusiveInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceFullScreenExclusiveInfoEXT &operator=(SurfaceFullScreenExclusiveInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveInfoEXT &operator=(VkSurfaceFullScreenExclusiveInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_) VULKAN_HPP_NOEXCEPT { fullScreenExclusive = fullScreenExclusive_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, fullScreenExclusive); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceFullScreenExclusiveInfoEXT const &) const = default; # else bool operator==(SurfaceFullScreenExclusiveInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (fullScreenExclusive == rhs.fullScreenExclusive); # endif } bool operator!=(SurfaceFullScreenExclusiveInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT) == sizeof(VkSurfaceFullScreenExclusiveInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); 
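  // Usage sketch (not part of the generated header): checking full-screen exclusive support through the
  // StructureChain variant of getSurfaceCapabilities2KHR. Assumes VK_KHR_get_surface_capabilities2 and
  // VK_EXT_full_screen_exclusive are enabled and that `physicalDevice` and `surface` are valid handles
  // created elsewhere; the extension may additionally require chaining SurfaceFullScreenExclusiveInfoEXT
  // (and the Win32 info) into surfaceInfo.pNext, see the extension documentation.
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //   auto caps = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
  //                                                         vk::SurfaceCapabilitiesFullScreenExclusiveEXT>( surfaceInfo );
  //   bool supported = caps.get<vk::SurfaceCapabilitiesFullScreenExclusiveEXT>().fullScreenExclusiveSupported == VK_TRUE;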
VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceFullScreenExclusiveInfoEXT; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_WIN32_KHR) struct SurfaceFullScreenExclusiveWin32InfoEXT { using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), hmonitor(hmonitor_) { } VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(SurfaceFullScreenExclusiveWin32InfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveWin32InfoEXT(VkSurfaceFullScreenExclusiveWin32InfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceFullScreenExclusiveWin32InfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceFullScreenExclusiveWin32InfoEXT &operator=(SurfaceFullScreenExclusiveWin32InfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceFullScreenExclusiveWin32InfoEXT &operator=(VkSurfaceFullScreenExclusiveWin32InfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT &setHmonitor(HMONITOR hmonitor_) VULKAN_HPP_NOEXCEPT { hmonitor = hmonitor_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, hmonitor); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceFullScreenExclusiveWin32InfoEXT const &) const = default; # else bool operator==(SurfaceFullScreenExclusiveWin32InfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (hmonitor == rhs.hmonitor); # endif } bool operator!=(SurfaceFullScreenExclusiveWin32InfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; const void *pNext = {}; HMONITOR hmonitor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT) == sizeof(VkSurfaceFullScreenExclusiveWin32InfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceFullScreenExclusiveWin32InfoEXT; }; #endif 
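  // Usage sketch (not part of the generated header): requesting application-controlled full-screen
  // exclusive mode (Win32 only) by chaining SurfaceFullScreenExclusiveInfoEXT and
  // SurfaceFullScreenExclusiveWin32InfoEXT into SwapchainCreateInfoKHR::pNext. Assumes `device`,
  // `swapchainInfo` and `monitor` exist and VK_EXT_full_screen_exclusive is enabled.
  //
  //   vk::SurfaceFullScreenExclusiveWin32InfoEXT win32Info( monitor );
  //   vk::SurfaceFullScreenExclusiveInfoEXT      fseInfo( vk::FullScreenExclusiveEXT::eApplicationControlled, &win32Info );
  //   swapchainInfo.setPNext( &fseInfo );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );
  //   device.acquireFullScreenExclusiveModeEXT( swapchain );  // acquire once the swapchain exists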
/*VK_USE_PLATFORM_WIN32_KHR*/ struct SurfaceProtectedCapabilitiesKHR { using NativeType = VkSurfaceProtectedCapabilitiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), supportsProtected(supportsProtected_) { } VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(SurfaceProtectedCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceProtectedCapabilitiesKHR(VkSurfaceProtectedCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : SurfaceProtectedCapabilitiesKHR(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SurfaceProtectedCapabilitiesKHR &operator=(SurfaceProtectedCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SurfaceProtectedCapabilitiesKHR &operator=(VkSurfaceProtectedCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR &setSupportsProtected(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_) VULKAN_HPP_NOEXCEPT { supportsProtected = supportsProtected_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, supportsProtected); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SurfaceProtectedCapabilitiesKHR const &) const = default; #else bool operator==(SurfaceProtectedCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (supportsProtected == rhs.supportsProtected); # endif } bool operator!=(SurfaceProtectedCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR) == sizeof(VkSurfaceProtectedCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = SurfaceProtectedCapabilitiesKHR; }; struct SwapchainCounterCreateInfoEXT { using NativeType = VkSwapchainCounterCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), surfaceCounters(surfaceCounters_) { } VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(SwapchainCounterCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SwapchainCounterCreateInfoEXT(VkSwapchainCounterCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : SwapchainCounterCreateInfoEXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SwapchainCounterCreateInfoEXT &operator=(SwapchainCounterCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; SwapchainCounterCreateInfoEXT &operator=(VkSwapchainCounterCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setSurfaceCounters(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_) VULKAN_HPP_NOEXCEPT { surfaceCounters = surfaceCounters_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, surfaceCounters); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SwapchainCounterCreateInfoEXT const &) const = default; #else bool operator==(SwapchainCounterCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (surfaceCounters == rhs.surfaceCounters); # endif } bool operator!=(SwapchainCounterCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT) == sizeof(VkSwapchainCounterCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = SwapchainCounterCreateInfoEXT; }; struct SwapchainCreateInfoKHR { using NativeType = VkSwapchainCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ 
= {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t *pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), surface(surface_), minImageCount(minImageCount_), imageFormat(imageFormat_), imageColorSpace(imageColorSpace_), imageExtent(imageExtent_), imageArrayLayers(imageArrayLayers_), imageUsage(imageUsage_), imageSharingMode(imageSharingMode_), queueFamilyIndexCount(queueFamilyIndexCount_), pQueueFamilyIndices(pQueueFamilyIndices_), preTransform(preTransform_), compositeAlpha(compositeAlpha_), presentMode(presentMode_), clipped(clipped_), oldSwapchain(oldSwapchain_) { } VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(SwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SwapchainCreateInfoKHR(VkSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : SwapchainCreateInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , surface(surface_) , minImageCount(minImageCount_) , imageFormat(imageFormat_) , imageColorSpace(imageColorSpace_) , imageExtent(imageExtent_) , imageArrayLayers(imageArrayLayers_) , imageUsage(imageUsage_) , imageSharingMode(imageSharingMode_) , queueFamilyIndexCount(static_cast(queueFamilyIndices_.size())) , pQueueFamilyIndices(queueFamilyIndices_.data()) , preTransform(preTransform_) , compositeAlpha(compositeAlpha_) , presentMode(presentMode_) , clipped(clipped_) , oldSwapchain(oldSwapchain_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SwapchainCreateInfoKHR &operator=(SwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; SwapchainCreateInfoKHR &operator=(VkSwapchainCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setPNext(const void *pNext_) 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setSurface(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_) VULKAN_HPP_NOEXCEPT { surface = surface_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setMinImageCount(uint32_t minImageCount_) VULKAN_HPP_NOEXCEPT { minImageCount = minImageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setImageFormat(VULKAN_HPP_NAMESPACE::Format imageFormat_) VULKAN_HPP_NOEXCEPT { imageFormat = imageFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setImageColorSpace(VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_) VULKAN_HPP_NOEXCEPT { imageColorSpace = imageColorSpace_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setImageExtent(VULKAN_HPP_NAMESPACE::Extent2D const &imageExtent_) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setImageArrayLayers(uint32_t imageArrayLayers_) VULKAN_HPP_NOEXCEPT { imageArrayLayers = imageArrayLayers_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setImageUsage(VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_) VULKAN_HPP_NOEXCEPT { imageUsage = imageUsage_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setImageSharingMode(VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_) VULKAN_HPP_NOEXCEPT { imageSharingMode = imageSharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setQueueFamilyIndexCount(uint32_t queueFamilyIndexCount_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setPQueueFamilyIndices(const uint32_t *pQueueFamilyIndices_) VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) SwapchainCreateInfoKHR & setQueueFamilyIndices(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &queueFamilyIndices_) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast(queueFamilyIndices_.size()); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setPreTransform(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_) VULKAN_HPP_NOEXCEPT { preTransform = preTransform_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setCompositeAlpha(VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_) VULKAN_HPP_NOEXCEPT { compositeAlpha = compositeAlpha_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setPresentMode(VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_) VULKAN_HPP_NOEXCEPT { presentMode = presentMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setClipped(VULKAN_HPP_NAMESPACE::Bool32 clipped_) VULKAN_HPP_NOEXCEPT { clipped = clipped_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &setOldSwapchain(VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_) VULKAN_HPP_NOEXCEPT { oldSwapchain = oldSwapchain_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, surface, minImageCount, imageFormat, imageColorSpace, imageExtent, imageArrayLayers, imageUsage, imageSharingMode, queueFamilyIndexCount, pQueueFamilyIndices, preTransform, compositeAlpha, presentMode, clipped, oldSwapchain); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SwapchainCreateInfoKHR const &) const = default; #else bool operator==(SwapchainCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (surface == rhs.surface) && (minImageCount == rhs.minImageCount) && (imageFormat == rhs.imageFormat) && (imageColorSpace == rhs.imageColorSpace) && (imageExtent == rhs.imageExtent) && (imageArrayLayers == rhs.imageArrayLayers) && (imageUsage == rhs.imageUsage) && (imageSharingMode == rhs.imageSharingMode) && (queueFamilyIndexCount == rhs.queueFamilyIndexCount) && (pQueueFamilyIndices == rhs.pQueueFamilyIndices) && (preTransform == rhs.preTransform) && (compositeAlpha == rhs.compositeAlpha) && (presentMode == rhs.presentMode) && (clipped == rhs.clipped) && (oldSwapchain == rhs.oldSwapchain); # endif } bool operator!=(SwapchainCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; uint32_t minImageCount = {}; VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; uint32_t imageArrayLayers = {}; VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t *pQueueFamilyIndices = {}; VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR) == sizeof(VkSwapchainCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SwapchainCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = SwapchainCreateInfoKHR; }; struct SwapchainDisplayNativeHdrCreateInfoAMD { using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR 
SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), localDimmingEnable(localDimmingEnable_) { } VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(SwapchainDisplayNativeHdrCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; SwapchainDisplayNativeHdrCreateInfoAMD(VkSwapchainDisplayNativeHdrCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT : SwapchainDisplayNativeHdrCreateInfoAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SwapchainDisplayNativeHdrCreateInfoAMD &operator=(SwapchainDisplayNativeHdrCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; SwapchainDisplayNativeHdrCreateInfoAMD &operator=(VkSwapchainDisplayNativeHdrCreateInfoAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_) VULKAN_HPP_NOEXCEPT { localDimmingEnable = localDimmingEnable_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, localDimmingEnable); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(SwapchainDisplayNativeHdrCreateInfoAMD const &) const = default; #else bool operator==(SwapchainDisplayNativeHdrCreateInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (localDimmingEnable == rhs.localDimmingEnable); # endif } bool operator!=(SwapchainDisplayNativeHdrCreateInfoAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD) == sizeof(VkSwapchainDisplayNativeHdrCreateInfoAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = SwapchainDisplayNativeHdrCreateInfoAMD; }; struct TextureLODGatherFormatPropertiesAMD { using NativeType = VkTextureLODGatherFormatPropertiesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
supportsTextureGatherLODBiasAMD(supportsTextureGatherLODBiasAMD_) { } VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(TextureLODGatherFormatPropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; TextureLODGatherFormatPropertiesAMD(VkTextureLODGatherFormatPropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT : TextureLODGatherFormatPropertiesAMD(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ TextureLODGatherFormatPropertiesAMD &operator=(TextureLODGatherFormatPropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT = default; TextureLODGatherFormatPropertiesAMD &operator=(VkTextureLODGatherFormatPropertiesAMD const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, supportsTextureGatherLODBiasAMD); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(TextureLODGatherFormatPropertiesAMD const &) const = default; #else bool operator==(TextureLODGatherFormatPropertiesAMD const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD); # endif } bool operator!=(TextureLODGatherFormatPropertiesAMD const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; void *pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD) == sizeof(VkTextureLODGatherFormatPropertiesAMD), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!"); template<> struct CppType { using Type = TextureLODGatherFormatPropertiesAMD; }; struct TimelineSemaphoreSubmitInfo { using NativeType = VkTimelineSemaphoreSubmitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t *pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t *pSignalSemaphoreValues_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), waitSemaphoreValueCount(waitSemaphoreValueCount_), pWaitSemaphoreValues(pWaitSemaphoreValues_), signalSemaphoreValueCount(signalSemaphoreValueCount_), pSignalSemaphoreValues(pSignalSemaphoreValues_) { } VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(TimelineSemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; TimelineSemaphoreSubmitInfo(VkTimelineSemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT : TimelineSemaphoreSubmitInfo(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) 
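    // The ArrayProxy-based constructor below derives waitSemaphoreValueCount and signalSemaphoreValueCount
    // from the proxies. Usage sketch (not part of the generated header), assuming `waitSemaphore`,
    // `signalSemaphore`, a vk::PipelineStageFlags `waitStage` and `queue` are valid objects created
    // elsewhere and the device was created with the timelineSemaphore feature enabled:
    //
    //   uint64_t waitValue   = 1;
    //   uint64_t signalValue = 2;
    //   vk::TimelineSemaphoreSubmitInfo timelineInfo( waitValue, signalValue );  // single-element proxies
    //   vk::SubmitInfo submitInfo;
    //   submitInfo.setWaitSemaphores( waitSemaphore )
    //             .setWaitDstStageMask( waitStage )
    //             .setSignalSemaphores( signalSemaphore )
    //             .setPNext( &timelineInfo );
    //   queue.submit( submitInfo );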
TimelineSemaphoreSubmitInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreValues_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , waitSemaphoreValueCount(static_cast(waitSemaphoreValues_.size())) , pWaitSemaphoreValues(waitSemaphoreValues_.data()) , signalSemaphoreValueCount(static_cast(signalSemaphoreValues_.size())) , pSignalSemaphoreValues(signalSemaphoreValues_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ TimelineSemaphoreSubmitInfo &operator=(TimelineSemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT = default; TimelineSemaphoreSubmitInfo &operator=(VkTimelineSemaphoreSubmitInfo const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &setWaitSemaphoreValueCount(uint32_t waitSemaphoreValueCount_) VULKAN_HPP_NOEXCEPT { waitSemaphoreValueCount = waitSemaphoreValueCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &setPWaitSemaphoreValues(const uint64_t *pWaitSemaphoreValues_) VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &waitSemaphoreValues_) VULKAN_HPP_NOEXCEPT { waitSemaphoreValueCount = static_cast(waitSemaphoreValues_.size()); pWaitSemaphoreValues = waitSemaphoreValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &setSignalSemaphoreValueCount(uint32_t signalSemaphoreValueCount_) VULKAN_HPP_NOEXCEPT { signalSemaphoreValueCount = signalSemaphoreValueCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &setPSignalSemaphoreValues(const uint64_t *pSignalSemaphoreValues_) VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &signalSemaphoreValues_) VULKAN_HPP_NOEXCEPT { signalSemaphoreValueCount = static_cast(signalSemaphoreValues_.size()); pSignalSemaphoreValues = signalSemaphoreValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(TimelineSemaphoreSubmitInfo const &) const = default; #else bool operator==(TimelineSemaphoreSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (waitSemaphoreValueCount == 
rhs.waitSemaphoreValueCount) && (pWaitSemaphoreValues == rhs.pWaitSemaphoreValues) && (signalSemaphoreValueCount == rhs.signalSemaphoreValueCount) && (pSignalSemaphoreValues == rhs.pSignalSemaphoreValues); # endif } bool operator!=(TimelineSemaphoreSubmitInfo const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; const void *pNext = {}; uint32_t waitSemaphoreValueCount = {}; const uint64_t *pWaitSemaphoreValues = {}; uint32_t signalSemaphoreValueCount = {}; const uint64_t *pSignalSemaphoreValues = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo) == sizeof(VkTimelineSemaphoreSubmitInfo), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!"); template<> struct CppType { using Type = TimelineSemaphoreSubmitInfo; }; using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; struct TraceRaysIndirectCommandKHR { using NativeType = VkTraceRaysIndirectCommandKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT : width(width_), height(height_), depth(depth_) { } VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(TraceRaysIndirectCommandKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; TraceRaysIndirectCommandKHR(VkTraceRaysIndirectCommandKHR const &rhs) VULKAN_HPP_NOEXCEPT : TraceRaysIndirectCommandKHR(*reinterpret_cast(&rhs)) { } explicit TraceRaysIndirectCommandKHR(Extent2D const &extent2D, uint32_t depth_ = {}) : width(extent2D.width) , height(extent2D.height) , depth(depth_) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ TraceRaysIndirectCommandKHR &operator=(TraceRaysIndirectCommandKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; TraceRaysIndirectCommandKHR &operator=(VkTraceRaysIndirectCommandKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR &setWidth(uint32_t width_) VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR &setHeight(uint32_t height_) VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR &setDepth(uint32_t depth_) VULKAN_HPP_NOEXCEPT { depth = depth_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(width, height, depth); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(TraceRaysIndirectCommandKHR const &) const = default; #else bool operator==(TraceRaysIndirectCommandKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (width == rhs.width) && (height == rhs.height) && (depth == rhs.depth); # endif } bool operator!=(TraceRaysIndirectCommandKHR 
const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: uint32_t width = {}; uint32_t height = {}; uint32_t depth = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR) == sizeof(VkTraceRaysIndirectCommandKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!"); struct ValidationCacheCreateInfoEXT { using NativeType = VkValidationCacheCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void *pInitialData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), initialDataSize(initialDataSize_), pInitialData(pInitialData_) { } VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(ValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ValidationCacheCreateInfoEXT(VkValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : ValidationCacheCreateInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &initialData_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , initialDataSize(initialData_.size() * sizeof(T)) , pInitialData(initialData_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ValidationCacheCreateInfoEXT &operator=(ValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ValidationCacheCreateInfoEXT &operator=(VkValidationCacheCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &setFlags(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &setInitialDataSize(size_t initialDataSize_) VULKAN_HPP_NOEXCEPT { initialDataSize = initialDataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &setPInitialData(const void *pInitialData_) VULKAN_HPP_NOEXCEPT { pInitialData = pInitialData_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template ValidationCacheCreateInfoEXT &setInitialData(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &initialData_) VULKAN_HPP_NOEXCEPT { initialDataSize = initialData_.size() * sizeof(T); pInitialData = initialData_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, initialDataSize, pInitialData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ValidationCacheCreateInfoEXT const &) const = default; #else bool operator==(ValidationCacheCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (initialDataSize == rhs.initialDataSize) && (pInitialData == rhs.pInitialData); # endif } bool operator!=(ValidationCacheCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; size_t initialDataSize = {}; const void *pInitialData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT) == sizeof(VkValidationCacheCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ValidationCacheCreateInfoEXT; }; struct ValidationFeaturesEXT { using NativeType = VkValidationFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT *pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT *pDisabledValidationFeatures_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), enabledValidationFeatureCount(enabledValidationFeatureCount_), pEnabledValidationFeatures(pEnabledValidationFeatures_), disabledValidationFeatureCount(disabledValidationFeatureCount_), pDisabledValidationFeatures(pDisabledValidationFeatures_) { } VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(ValidationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ValidationFeaturesEXT(VkValidationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT : ValidationFeaturesEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &disabledValidationFeatures_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , enabledValidationFeatureCount(static_cast(enabledValidationFeatures_.size())) , pEnabledValidationFeatures(enabledValidationFeatures_.data()) , disabledValidationFeatureCount(static_cast(disabledValidationFeatures_.size())) , pDisabledValidationFeatures(disabledValidationFeatures_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ValidationFeaturesEXT &operator=(ValidationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ValidationFeaturesEXT &operator=(VkValidationFeaturesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) 
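    // Editorial usage sketch (not generated code): ValidationFeaturesEXT is normally chained into
    // InstanceCreateInfo::pNext to switch individual validation features on or off; the enhanced-mode
    // ArrayProxyNoTemporaries constructor above fills the count/pointer pair from a contiguous range.
    // Assumes the default "vk" namespace and that the VK_EXT_validation_features extension is available.
    //
    //   vk::ValidationFeatureEnableEXT enables[] = { vk::ValidationFeatureEnableEXT::eBestPractices };
    //   vk::ValidationFeaturesEXT      validationFeatures( enables );  // sets enabledValidationFeatureCount + pointer
    //   vk::InstanceCreateInfo         instanceCreateInfo;
    //   instanceCreateInfo.setPNext( &validationFeatures );            // hook into the create-info chain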
VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &setEnabledValidationFeatureCount(uint32_t enabledValidationFeatureCount_) VULKAN_HPP_NOEXCEPT { enabledValidationFeatureCount = enabledValidationFeatureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPEnabledValidationFeatures(const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT *pEnabledValidationFeatures_) VULKAN_HPP_NOEXCEPT { pEnabledValidationFeatures = pEnabledValidationFeatures_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ValidationFeaturesEXT &setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &enabledValidationFeatures_) VULKAN_HPP_NOEXCEPT { enabledValidationFeatureCount = static_cast(enabledValidationFeatures_.size()); pEnabledValidationFeatures = enabledValidationFeatures_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &setDisabledValidationFeatureCount(uint32_t disabledValidationFeatureCount_) VULKAN_HPP_NOEXCEPT { disabledValidationFeatureCount = disabledValidationFeatureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPDisabledValidationFeatures(const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT *pDisabledValidationFeatures_) VULKAN_HPP_NOEXCEPT { pDisabledValidationFeatures = pDisabledValidationFeatures_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ValidationFeaturesEXT &setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &disabledValidationFeatures_) VULKAN_HPP_NOEXCEPT { disabledValidationFeatureCount = static_cast(disabledValidationFeatures_.size()); pDisabledValidationFeatures = disabledValidationFeatures_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ValidationFeaturesEXT const &) const = default; #else bool operator==(ValidationFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (enabledValidationFeatureCount == rhs.enabledValidationFeatureCount) && (pEnabledValidationFeatures == rhs.pEnabledValidationFeatures) && (disabledValidationFeatureCount == rhs.disabledValidationFeatureCount) && (pDisabledValidationFeatures == rhs.pDisabledValidationFeatures); # endif } bool operator!=(ValidationFeaturesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; const void *pNext = {}; uint32_t enabledValidationFeatureCount = {}; const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT *pEnabledValidationFeatures = {}; uint32_t disabledValidationFeatureCount = {}; const 
VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT *pDisabledValidationFeatures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT) == sizeof(VkValidationFeaturesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ValidationFeaturesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ValidationFeaturesEXT; }; struct ValidationFlagsEXT { using NativeType = VkValidationFlagsEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT *pDisabledValidationChecks_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), disabledValidationCheckCount(disabledValidationCheckCount_), pDisabledValidationChecks(pDisabledValidationChecks_) { } VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(ValidationFlagsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ValidationFlagsEXT(VkValidationFlagsEXT const &rhs) VULKAN_HPP_NOEXCEPT : ValidationFlagsEXT(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ValidationFlagsEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &disabledValidationChecks_, const void *pNext_ = nullptr) : pNext(pNext_) , disabledValidationCheckCount(static_cast(disabledValidationChecks_.size())) , pDisabledValidationChecks(disabledValidationChecks_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ValidationFlagsEXT &operator=(ValidationFlagsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; ValidationFlagsEXT &operator=(VkValidationFlagsEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT &setDisabledValidationCheckCount(uint32_t disabledValidationCheckCount_) VULKAN_HPP_NOEXCEPT { disabledValidationCheckCount = disabledValidationCheckCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPDisabledValidationChecks(const VULKAN_HPP_NAMESPACE::ValidationCheckEXT *pDisabledValidationChecks_) VULKAN_HPP_NOEXCEPT { pDisabledValidationChecks = pDisabledValidationChecks_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) ValidationFlagsEXT &setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &disabledValidationChecks_) VULKAN_HPP_NOEXCEPT { disabledValidationCheckCount = static_cast(disabledValidationChecks_.size()); pDisabledValidationChecks = disabledValidationChecks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks); } #endif #if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ValidationFlagsEXT const &) const = default; #else bool operator==(ValidationFlagsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (disabledValidationCheckCount == rhs.disabledValidationCheckCount) && (pDisabledValidationChecks == rhs.pDisabledValidationChecks); # endif } bool operator!=(ValidationFlagsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; const void *pNext = {}; uint32_t disabledValidationCheckCount = {}; const VULKAN_HPP_NAMESPACE::ValidationCheckEXT *pDisabledValidationChecks = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ValidationFlagsEXT) == sizeof(VkValidationFlagsEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ValidationFlagsEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = ValidationFlagsEXT; }; struct VertexInputAttributeDescription2EXT { using NativeType = VkVertexInputAttributeDescription2EXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), location(location_), binding(binding_), format(format_), offset(offset_) { } VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(VertexInputAttributeDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputAttributeDescription2EXT(VkVertexInputAttributeDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT : VertexInputAttributeDescription2EXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VertexInputAttributeDescription2EXT &operator=(VertexInputAttributeDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputAttributeDescription2EXT &operator=(VkVertexInputAttributeDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &setLocation(uint32_t location_) VULKAN_HPP_NOEXCEPT { location = location_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &setFormat(VULKAN_HPP_NAMESPACE::Format format_) VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &setOffset(uint32_t offset_) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator 
VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, location, binding, format, offset); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VertexInputAttributeDescription2EXT const &) const = default; #else bool operator==(VertexInputAttributeDescription2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (location == rhs.location) && (binding == rhs.binding) && (format == rhs.format) && (offset == rhs.offset); # endif } bool operator!=(VertexInputAttributeDescription2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; void *pNext = {}; uint32_t location = {}; uint32_t binding = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; uint32_t offset = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT) == sizeof(VkVertexInputAttributeDescription2EXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VertexInputAttributeDescription2EXT; }; struct VertexInputBindingDescription2EXT { using NativeType = VkVertexInputBindingDescription2EXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, uint32_t divisor_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), binding(binding_), stride(stride_), inputRate(inputRate_), divisor(divisor_) { } VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(VertexInputBindingDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDescription2EXT(VkVertexInputBindingDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT : VertexInputBindingDescription2EXT(*reinterpret_cast(&rhs)) { } #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VertexInputBindingDescription2EXT &operator=(VertexInputBindingDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDescription2EXT &operator=(VkVertexInputBindingDescription2EXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT &setBinding(uint32_t binding_) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT &setStride(uint32_t stride_) VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT 
&setInputRate(VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_) VULKAN_HPP_NOEXCEPT { inputRate = inputRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT &setDivisor(uint32_t divisor_) VULKAN_HPP_NOEXCEPT { divisor = divisor_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, binding, stride, inputRate, divisor); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VertexInputBindingDescription2EXT const &) const = default; #else bool operator==(VertexInputBindingDescription2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (binding == rhs.binding) && (stride == rhs.stride) && (inputRate == rhs.inputRate) && (divisor == rhs.divisor); # endif } bool operator!=(VertexInputBindingDescription2EXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; void *pNext = {}; uint32_t binding = {}; uint32_t stride = {}; VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; uint32_t divisor = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT) == sizeof(VkVertexInputBindingDescription2EXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VertexInputBindingDescription2EXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VertexInputBindingDescription2EXT; }; #if defined(VK_USE_PLATFORM_VI_NN) struct ViSurfaceCreateInfoNN { using NativeType = VkViSurfaceCreateInfoNN; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void *window_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), window(window_) { } VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(ViSurfaceCreateInfoNN const &rhs) VULKAN_HPP_NOEXCEPT = default; ViSurfaceCreateInfoNN(VkViSurfaceCreateInfoNN const &rhs) VULKAN_HPP_NOEXCEPT : ViSurfaceCreateInfoNN(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ViSurfaceCreateInfoNN &operator=(ViSurfaceCreateInfoNN const &rhs) VULKAN_HPP_NOEXCEPT = default; ViSurfaceCreateInfoNN &operator=(VkViSurfaceCreateInfoNN const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN &setFlags(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN &setWindow(void *window_) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, window); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(ViSurfaceCreateInfoNN const &) const = default; # else bool operator==(ViSurfaceCreateInfoNN const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (window == rhs.window); # endif } bool operator!=(ViSurfaceCreateInfoNN const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; const void *pNext = {}; VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; void *window = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN) == sizeof(VkViSurfaceCreateInfoNN), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "ViSurfaceCreateInfoNN is not nothrow_move_constructible!"); template<> struct CppType { using Type = ViSurfaceCreateInfoNN; }; #endif /*VK_USE_PLATFORM_VI_NN*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoPictureResourceKHR { using NativeType = VkVideoPictureResourceKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR(VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, uint32_t baseArrayLayer_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), codedOffset(codedOffset_), codedExtent(codedExtent_), baseArrayLayer(baseArrayLayer_), imageViewBinding(imageViewBinding_) { } VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR(VideoPictureResourceKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoPictureResourceKHR(VkVideoPictureResourceKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoPictureResourceKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoPictureResourceKHR &operator=(VideoPictureResourceKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoPictureResourceKHR &operator=(VkVideoPictureResourceKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &setCodedOffset(VULKAN_HPP_NAMESPACE::Offset2D const &codedOffset_) VULKAN_HPP_NOEXCEPT { codedOffset = codedOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &setCodedExtent(VULKAN_HPP_NAMESPACE::Extent2D const &codedExtent_) 
VULKAN_HPP_NOEXCEPT { codedExtent = codedExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &setBaseArrayLayer(uint32_t baseArrayLayer_) VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &setImageViewBinding(VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_) VULKAN_HPP_NOEXCEPT { imageViewBinding = imageViewBinding_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoPictureResourceKHR const &) const = default; # else bool operator==(VideoPictureResourceKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (codedOffset == rhs.codedOffset) && (codedExtent == rhs.codedExtent) && (baseArrayLayer == rhs.baseArrayLayer) && (imageViewBinding == rhs.imageViewBinding); # endif } bool operator!=(VideoPictureResourceKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; uint32_t baseArrayLayer = {}; VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR) == sizeof(VkVideoPictureResourceKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoPictureResourceKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoPictureResourceKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoReferenceSlotKHR { using NativeType = VkVideoReferenceSlotKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR(int8_t slotIndex_ = {}, const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR *pPictureResource_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), slotIndex(slotIndex_), pPictureResource(pPictureResource_) { } VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR(VideoReferenceSlotKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoReferenceSlotKHR(VkVideoReferenceSlotKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoReferenceSlotKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoReferenceSlotKHR &operator=(VideoReferenceSlotKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoReferenceSlotKHR &operator=(VkVideoReferenceSlotKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR 
&setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR &setSlotIndex(int8_t slotIndex_) VULKAN_HPP_NOEXCEPT { slotIndex = slotIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR & setPPictureResource(const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR *pPictureResource_) VULKAN_HPP_NOEXCEPT { pPictureResource = pPictureResource_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, slotIndex, pPictureResource); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoReferenceSlotKHR const &) const = default; # else bool operator==(VideoReferenceSlotKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (slotIndex == rhs.slotIndex) && (pPictureResource == rhs.pPictureResource); # endif } bool operator!=(VideoReferenceSlotKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotKHR; const void *pNext = {}; int8_t slotIndex = {}; const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR *pPictureResource = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR) == sizeof(VkVideoReferenceSlotKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoReferenceSlotKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoReferenceSlotKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoBeginCodingInfoKHR { using NativeType = VkVideoBeginCodingInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), codecQualityPreset(codecQualityPreset_), videoSession(videoSession_), videoSessionParameters(videoSessionParameters_), referenceSlotCount(referenceSlotCount_), pReferenceSlots(pReferenceSlots_) { } VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(VideoBeginCodingInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoBeginCodingInfoKHR(VkVideoBeginCodingInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoBeginCodingInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoBeginCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR 
flags_, VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceSlots_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , codecQualityPreset(codecQualityPreset_) , videoSession(videoSession_) , videoSessionParameters(videoSessionParameters_) , referenceSlotCount(static_cast(referenceSlots_.size())) , pReferenceSlots(referenceSlots_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoBeginCodingInfoKHR &operator=(VideoBeginCodingInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoBeginCodingInfoKHR &operator=(VkVideoBeginCodingInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setCodecQualityPreset(VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_) VULKAN_HPP_NOEXCEPT { codecQualityPreset = codecQualityPreset_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &setVideoSession(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_) VULKAN_HPP_NOEXCEPT { videoSession = videoSession_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSessionParameters(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_) VULKAN_HPP_NOEXCEPT { videoSessionParameters = videoSessionParameters_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &setReferenceSlotCount(uint32_t referenceSlotCount_) VULKAN_HPP_NOEXCEPT { referenceSlotCount = referenceSlotCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPReferenceSlots(const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots_) VULKAN_HPP_NOEXCEPT { pReferenceSlots = pReferenceSlots_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoBeginCodingInfoKHR &setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceSlots_) VULKAN_HPP_NOEXCEPT { referenceSlotCount = static_cast(referenceSlots_.size()); pReferenceSlots = referenceSlots_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, codecQualityPreset, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoBeginCodingInfoKHR const &) const = default; # else bool operator==(VideoBeginCodingInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && 
(codecQualityPreset == rhs.codecQualityPreset) && (videoSession == rhs.videoSession) && (videoSessionParameters == rhs.videoSessionParameters) && (referenceSlotCount == rhs.referenceSlotCount) && (pReferenceSlots == rhs.pReferenceSlots); # endif } bool operator!=(VideoBeginCodingInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {}; VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; uint32_t referenceSlotCount = {}; const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR) == sizeof(VkVideoBeginCodingInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoBeginCodingInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoBeginCodingInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoBindMemoryKHR { using NativeType = VkVideoBindMemoryKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryBindIndex(memoryBindIndex_), memory(memory_), memoryOffset(memoryOffset_), memorySize(memorySize_) { } VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR(VideoBindMemoryKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoBindMemoryKHR(VkVideoBindMemoryKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoBindMemoryKHR(*reinterpret_cast(&rhs)) {} # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoBindMemoryKHR &operator=(VideoBindMemoryKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoBindMemoryKHR &operator=(VkVideoBindMemoryKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &setMemoryBindIndex(uint32_t memoryBindIndex_) VULKAN_HPP_NOEXCEPT { memoryBindIndex = memoryBindIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &setMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory_) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &setMemoryOffset(VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_) VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &setMemorySize(VULKAN_HPP_NAMESPACE::DeviceSize memorySize_) VULKAN_HPP_NOEXCEPT { memorySize = memorySize_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit 
operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoBindMemoryKHR const &) const = default; # else bool operator==(VideoBindMemoryKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryBindIndex == rhs.memoryBindIndex) && (memory == rhs.memory) && (memoryOffset == rhs.memoryOffset) && (memorySize == rhs.memorySize); # endif } bool operator!=(VideoBindMemoryKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBindMemoryKHR; const void *pNext = {}; uint32_t memoryBindIndex = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR) == sizeof(VkVideoBindMemoryKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoBindMemoryKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoBindMemoryKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoCapabilitiesKHR { using NativeType = VkVideoCapabilitiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {}, uint32_t maxReferencePicturesSlotsCount_ = {}, uint32_t maxReferencePicturesActiveCount_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), capabilityFlags(capabilityFlags_), minBitstreamBufferOffsetAlignment(minBitstreamBufferOffsetAlignment_), minBitstreamBufferSizeAlignment(minBitstreamBufferSizeAlignment_), videoPictureExtentGranularity(videoPictureExtentGranularity_), minExtent(minExtent_), maxExtent(maxExtent_), maxReferencePicturesSlotsCount(maxReferencePicturesSlotsCount_), maxReferencePicturesActiveCount(maxReferencePicturesActiveCount_), stdHeaderVersion(stdHeaderVersion_) { } VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR(VideoCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoCapabilitiesKHR(VkVideoCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoCapabilitiesKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoCapabilitiesKHR &operator=(VideoCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoCapabilitiesKHR &operator=(VkVideoCapabilitiesKHR const &rhs) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, capabilityFlags, minBitstreamBufferOffsetAlignment, minBitstreamBufferSizeAlignment, videoPictureExtentGranularity, minExtent, maxExtent, maxReferencePicturesSlotsCount, maxReferencePicturesActiveCount, stdHeaderVersion); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoCapabilitiesKHR const &) const = default; # else bool operator==(VideoCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (capabilityFlags == rhs.capabilityFlags) && (minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment) && (minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment) && (videoPictureExtentGranularity == rhs.videoPictureExtentGranularity) && (minExtent == rhs.minExtent) && (maxExtent == rhs.maxExtent) && (maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount) && (maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount) && (stdHeaderVersion == rhs.stdHeaderVersion); # endif } bool operator!=(VideoCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags = {}; VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {}; VULKAN_HPP_NAMESPACE::Extent2D minExtent = {}; VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {}; uint32_t maxReferencePicturesSlotsCount = {}; uint32_t maxReferencePicturesActiveCount = {}; VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR) == sizeof(VkVideoCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoCapabilitiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoCapabilitiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoCodingControlInfoKHR { using NativeType = VkVideoCodingControlInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR(VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR(VideoCodingControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoCodingControlInfoKHR(VkVideoCodingControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : 
VideoCodingControlInfoKHR(*reinterpret_cast<VideoCodingControlInfoKHR const *>(&rhs)) { }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoCodingControlInfoKHR &operator=(VideoCodingControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    VideoCodingControlInfoKHR &operator=(VkVideoCodingControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoCodingControlInfoKHR const *>(&rhs);
      return *this;
    }

# if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoCodingControlInfoKHR *>(this);
    }

    explicit operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoCodingControlInfoKHR *>(this);
    }

# if defined(VULKAN_HPP_USE_REFLECT)
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, flags);
    }
# endif

# if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(VideoCodingControlInfoKHR const &) const = default;
# else
    bool operator==(VideoCodingControlInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
#  if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
#  else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags);
#  endif
    }

    bool operator!=(VideoCodingControlInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
# endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR;
    const void *pNext = {};
    VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR) == sizeof(VkVideoCodingControlInfoKHR),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
                           "VideoCodingControlInfoKHR is not nothrow_move_constructible!");

  template<> struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
  {
    using Type = VideoCodingControlInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined(VK_ENABLE_BETA_EXTENSIONS)
  struct VideoDecodeCapabilitiesKHR
  {
    using NativeType = VkVideoDecodeCapabilitiesKHR;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR;

# if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {},
                                                    void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), flags(flags_)
    {
    }

    VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR(VideoDecodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeCapabilitiesKHR(VkVideoDecodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT
      : VideoDecodeCapabilitiesKHR(*reinterpret_cast<VideoDecodeCapabilitiesKHR const *>(&rhs))
    {
    }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoDecodeCapabilitiesKHR &operator=(VideoDecodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeCapabilitiesKHR &operator=(VkVideoDecodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VideoDecodeCapabilitiesKHR const *>(&rhs);
      return *this;
    }

    explicit operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeCapabilitiesKHR *>(this);
    }
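    // Editorial usage sketch (not generated code): VideoDecodeCapabilitiesKHR is an output structure
    // that extends VideoCapabilitiesKHR, so it is typically chained into VideoCapabilitiesKHR::pNext
    // before querying a physical device's video decode capabilities (beta extension; requires
    // VK_ENABLE_BETA_EXTENSIONS).
    //
    //   vk::VideoDecodeCapabilitiesKHR decodeCaps;
    //   vk::VideoCapabilitiesKHR       caps;
    //   caps.pNext = &decodeCaps;  // the implementation fills both structures during the query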
explicit operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeCapabilitiesKHR const &) const = default; # else bool operator==(VideoDecodeCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags); # endif } bool operator!=(VideoDecodeCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR) == sizeof(VkVideoDecodeCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeCapabilitiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264CapabilitiesEXT { using NativeType = VkVideoDecodeH264CapabilitiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesEXT(StdVideoH264Level maxLevel_ = {}, VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxLevel(maxLevel_), fieldOffsetGranularity(fieldOffsetGranularity_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesEXT(VideoDecodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264CapabilitiesEXT(VkVideoDecodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264CapabilitiesEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264CapabilitiesEXT &operator=(VideoDecodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264CapabilitiesEXT &operator=(VkVideoDecodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxLevel, fieldOffsetGranularity); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(VideoDecodeH264CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&maxLevel, &rhs.maxLevel, sizeof(StdVideoH264Level)); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0) return cmp; return std::strong_ordering::equivalent; } # endif bool operator==(VideoDecodeH264CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&maxLevel, &rhs.maxLevel, sizeof(StdVideoH264Level)) == 0) && (fieldOffsetGranularity == rhs.fieldOffsetGranularity); } bool operator!=(VideoDecodeH264CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT; void *pNext = {}; StdVideoH264Level maxLevel = {}; VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT) == sizeof(VkVideoDecodeH264CapabilitiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264CapabilitiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264CapabilitiesEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264DpbSlotInfoEXT { using NativeType = VkVideoDecodeH264DpbSlotInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT(const StdVideoDecodeH264ReferenceInfo *pStdReferenceInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pStdReferenceInfo(pStdReferenceInfo_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT(VideoDecodeH264DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264DpbSlotInfoEXT(VkVideoDecodeH264DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264DpbSlotInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264DpbSlotInfoEXT &operator=(VideoDecodeH264DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264DpbSlotInfoEXT &operator=(VkVideoDecodeH264DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPStdReferenceInfo(const StdVideoDecodeH264ReferenceInfo *pStdReferenceInfo_) VULKAN_HPP_NOEXCEPT { pStdReferenceInfo = pStdReferenceInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pStdReferenceInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH264DpbSlotInfoEXT const &) const = default; # else bool operator==(VideoDecodeH264DpbSlotInfoEXT const &rhs) const 
VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pStdReferenceInfo == rhs.pStdReferenceInfo); # endif } bool operator!=(VideoDecodeH264DpbSlotInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; const void *pNext = {}; const StdVideoDecodeH264ReferenceInfo *pStdReferenceInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT) == sizeof(VkVideoDecodeH264DpbSlotInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264DpbSlotInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264DpbSlotInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264MvcEXT { using NativeType = VkVideoDecodeH264MvcEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT(const StdVideoDecodeH264Mvc *pStdMvc_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pStdMvc(pStdMvc_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT(VideoDecodeH264MvcEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264MvcEXT(VkVideoDecodeH264MvcEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264MvcEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264MvcEXT &operator=(VideoDecodeH264MvcEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264MvcEXT &operator=(VkVideoDecodeH264MvcEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT &setPStdMvc(const StdVideoDecodeH264Mvc *pStdMvc_) VULKAN_HPP_NOEXCEPT { pStdMvc = pStdMvc_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pStdMvc); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH264MvcEXT const &) const = default; # else bool operator==(VideoDecodeH264MvcEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pStdMvc == rhs.pStdMvc); # endif } bool operator!=(VideoDecodeH264MvcEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264MvcEXT; const void *pNext = {}; const StdVideoDecodeH264Mvc *pStdMvc = {}; }; 
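  // Editorial usage sketch (not generated code): like every wrapper in this header, VideoDecodeH264MvcEXT
  // is layout-compatible with its NativeType, so the explicit conversion operators above can be used to
  // hand the C++ struct to C interfaces that expect the corresponding Vk struct.
  //
  //   vk::VideoDecodeH264MvcEXT mvc;
  //   VkVideoDecodeH264MvcEXT const &native = static_cast<VkVideoDecodeH264MvcEXT const &>( mvc );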
VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT) == sizeof(VkVideoDecodeH264MvcEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264MvcEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264MvcEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264PictureInfoEXT { using NativeType = VkVideoDecodeH264PictureInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT(const StdVideoDecodeH264PictureInfo *pStdPictureInfo_ = {}, uint32_t slicesCount_ = {}, const uint32_t *pSlicesDataOffsets_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pStdPictureInfo(pStdPictureInfo_), slicesCount(slicesCount_), pSlicesDataOffsets(pSlicesDataOffsets_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT(VideoDecodeH264PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264PictureInfoEXT(VkVideoDecodeH264PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264PictureInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH264PictureInfoEXT(const StdVideoDecodeH264PictureInfo *pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &slicesDataOffsets_, const void *pNext_ = nullptr) : pNext(pNext_) , pStdPictureInfo(pStdPictureInfo_) , slicesCount(static_cast(slicesDataOffsets_.size())) , pSlicesDataOffsets(slicesDataOffsets_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264PictureInfoEXT &operator=(VideoDecodeH264PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264PictureInfoEXT &operator=(VkVideoDecodeH264PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &setPStdPictureInfo(const StdVideoDecodeH264PictureInfo *pStdPictureInfo_) VULKAN_HPP_NOEXCEPT { pStdPictureInfo = pStdPictureInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &setSlicesCount(uint32_t slicesCount_) VULKAN_HPP_NOEXCEPT { slicesCount = slicesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &setPSlicesDataOffsets(const uint32_t *pSlicesDataOffsets_) VULKAN_HPP_NOEXCEPT { pSlicesDataOffsets = pSlicesDataOffsets_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &slicesDataOffsets_) VULKAN_HPP_NOEXCEPT { slicesCount = static_cast(slicesDataOffsets_.size()); pSlicesDataOffsets = slicesDataOffsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT { 
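      // The conversion below reinterprets the C++ wrapper as the underlying VkVideoDecodeH264PictureInfoEXT;
      // the VULKAN_HPP_STATIC_ASSERTs emitted after the struct guarantee that the two layouts match.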
return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH264PictureInfoEXT const &) const = default; # else bool operator==(VideoDecodeH264PictureInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pStdPictureInfo == rhs.pStdPictureInfo) && (slicesCount == rhs.slicesCount) && (pSlicesDataOffsets == rhs.pSlicesDataOffsets); # endif } bool operator!=(VideoDecodeH264PictureInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT; const void *pNext = {}; const StdVideoDecodeH264PictureInfo *pStdPictureInfo = {}; uint32_t slicesCount = {}; const uint32_t *pSlicesDataOffsets = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT) == sizeof(VkVideoDecodeH264PictureInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264PictureInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264PictureInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264ProfileEXT { using NativeType = VkVideoDecodeH264ProfileEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT(StdVideoH264ProfileIdc stdProfileIdc_ = {}, VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stdProfileIdc(stdProfileIdc_), pictureLayout(pictureLayout_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT(VideoDecodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264ProfileEXT(VkVideoDecodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264ProfileEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264ProfileEXT &operator=(VideoDecodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264ProfileEXT &operator=(VkVideoDecodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT &setStdProfileIdc(StdVideoH264ProfileIdc stdProfileIdc_) VULKAN_HPP_NOEXCEPT { stdProfileIdc = stdProfileIdc_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT & setPictureLayout(VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_) VULKAN_HPP_NOEXCEPT { pictureLayout = pictureLayout_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } explicit operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stdProfileIdc, pictureLayout); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(VideoDecodeH264ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&stdProfileIdc, &rhs.stdProfileIdc, sizeof(StdVideoH264ProfileIdc)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; if(auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0) return cmp; return std::strong_ordering::equivalent; } # endif bool operator==(VideoDecodeH264ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&stdProfileIdc, &rhs.stdProfileIdc, sizeof(StdVideoH264ProfileIdc)) == 0) && (pictureLayout == rhs.pictureLayout); } bool operator!=(VideoDecodeH264ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileEXT; const void *pNext = {}; StdVideoH264ProfileIdc stdProfileIdc = {}; VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT) == sizeof(VkVideoDecodeH264ProfileEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264ProfileEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264ProfileEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264SessionParametersAddInfoEXT { using NativeType = VkVideoDecodeH264SessionParametersAddInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(uint32_t spsStdCount_ = {}, const StdVideoH264SequenceParameterSet *pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH264PictureParameterSet *pPpsStd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), spsStdCount(spsStdCount_), pSpsStd(pSpsStd_), ppsStdCount(ppsStdCount_), pPpsStd(pPpsStd_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(VideoDecodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264SessionParametersAddInfoEXT(VkVideoDecodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264SessionParametersAddInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH264SessionParametersAddInfoEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , spsStdCount(static_cast(spsStd_.size())) , pSpsStd(spsStd_.data()) , ppsStdCount(static_cast(ppsStd_.size())) , pPpsStd(ppsStd_.data()) { } # 
endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264SessionParametersAddInfoEXT &operator=(VideoDecodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264SessionParametersAddInfoEXT &operator=(VkVideoDecodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &setSpsStdCount(uint32_t spsStdCount_) VULKAN_HPP_NOEXCEPT { spsStdCount = spsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &setPSpsStd(const StdVideoH264SequenceParameterSet *pSpsStd_) VULKAN_HPP_NOEXCEPT { pSpsStd = pSpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH264SessionParametersAddInfoEXT & setSpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_) VULKAN_HPP_NOEXCEPT { spsStdCount = static_cast(spsStd_.size()); pSpsStd = spsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &setPpsStdCount(uint32_t ppsStdCount_) VULKAN_HPP_NOEXCEPT { ppsStdCount = ppsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &setPPpsStd(const StdVideoH264PictureParameterSet *pPpsStd_) VULKAN_HPP_NOEXCEPT { pPpsStd = pPpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH264SessionParametersAddInfoEXT & setPpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_) VULKAN_HPP_NOEXCEPT { ppsStdCount = static_cast(ppsStd_.size()); pPpsStd = ppsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH264SessionParametersAddInfoEXT const &) const = default; # else bool operator==(VideoDecodeH264SessionParametersAddInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (spsStdCount == rhs.spsStdCount) && (pSpsStd == rhs.pSpsStd) && (ppsStdCount == rhs.ppsStdCount) && (pPpsStd == rhs.pPpsStd); # endif } bool operator!=(VideoDecodeH264SessionParametersAddInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; const void *pNext = {}; uint32_t spsStdCount = {}; const StdVideoH264SequenceParameterSet *pSpsStd = {}; uint32_t ppsStdCount = {}; const StdVideoH264PictureParameterSet *pPpsStd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT) == 
sizeof(VkVideoDecodeH264SessionParametersAddInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264SessionParametersAddInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH264SessionParametersCreateInfoEXT { using NativeType = VkVideoDecodeH264SessionParametersCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT *pParametersAddInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxSpsStdCount(maxSpsStdCount_), maxPpsStdCount(maxPpsStdCount_), pParametersAddInfo(pParametersAddInfo_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(VideoDecodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264SessionParametersCreateInfoEXT(VkVideoDecodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH264SessionParametersCreateInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH264SessionParametersCreateInfoEXT &operator=(VideoDecodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH264SessionParametersCreateInfoEXT &operator=(VkVideoDecodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &setMaxSpsStdCount(uint32_t maxSpsStdCount_) VULKAN_HPP_NOEXCEPT { maxSpsStdCount = maxSpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &setMaxPpsStdCount(uint32_t maxPpsStdCount_) VULKAN_HPP_NOEXCEPT { maxPpsStdCount = maxPpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo(const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT *pParametersAddInfo_) VULKAN_HPP_NOEXCEPT { pParametersAddInfo = pParametersAddInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH264SessionParametersCreateInfoEXT const &) const = default; # else bool 
operator==(VideoDecodeH264SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxSpsStdCount == rhs.maxSpsStdCount) && (maxPpsStdCount == rhs.maxPpsStdCount) && (pParametersAddInfo == rhs.pParametersAddInfo); # endif } bool operator!=(VideoDecodeH264SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; const void *pNext = {}; uint32_t maxSpsStdCount = {}; uint32_t maxPpsStdCount = {}; const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT *pParametersAddInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT) == sizeof(VkVideoDecodeH264SessionParametersCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH264SessionParametersCreateInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH265CapabilitiesEXT { using NativeType = VkVideoDecodeH265CapabilitiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesEXT(StdVideoH265Level maxLevel_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxLevel(maxLevel_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesEXT(VideoDecodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265CapabilitiesEXT(VkVideoDecodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH265CapabilitiesEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH265CapabilitiesEXT &operator=(VideoDecodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265CapabilitiesEXT &operator=(VkVideoDecodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxLevel); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(VideoDecodeH265CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&maxLevel, &rhs.maxLevel, sizeof(StdVideoH265Level)); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(VideoDecodeH265CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&maxLevel, &rhs.maxLevel, sizeof(StdVideoH265Level)) == 0); } bool operator!=(VideoDecodeH265CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT; void *pNext = {}; StdVideoH265Level maxLevel = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT) == sizeof(VkVideoDecodeH265CapabilitiesEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH265CapabilitiesEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH265CapabilitiesEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH265DpbSlotInfoEXT { using NativeType = VkVideoDecodeH265DpbSlotInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT(const StdVideoDecodeH265ReferenceInfo *pStdReferenceInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pStdReferenceInfo(pStdReferenceInfo_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT(VideoDecodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265DpbSlotInfoEXT(VkVideoDecodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH265DpbSlotInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH265DpbSlotInfoEXT &operator=(VideoDecodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265DpbSlotInfoEXT &operator=(VkVideoDecodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & setPStdReferenceInfo(const StdVideoDecodeH265ReferenceInfo *pStdReferenceInfo_) VULKAN_HPP_NOEXCEPT { pStdReferenceInfo = pStdReferenceInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pStdReferenceInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH265DpbSlotInfoEXT const &) const = default; # else bool operator==(VideoDecodeH265DpbSlotInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pStdReferenceInfo == rhs.pStdReferenceInfo); # endif } 
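    // On this pre-spaceship path a defaulted operator<=> (and the operator== it implies) is not available,
    // so operator!= below is spelled out in terms of operator==.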
bool operator!=(VideoDecodeH265DpbSlotInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; const void *pNext = {}; const StdVideoDecodeH265ReferenceInfo *pStdReferenceInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT) == sizeof(VkVideoDecodeH265DpbSlotInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH265DpbSlotInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH265DpbSlotInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH265PictureInfoEXT { using NativeType = VkVideoDecodeH265PictureInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT(StdVideoDecodeH265PictureInfo *pStdPictureInfo_ = {}, uint32_t slicesCount_ = {}, const uint32_t *pSlicesDataOffsets_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pStdPictureInfo(pStdPictureInfo_), slicesCount(slicesCount_), pSlicesDataOffsets(pSlicesDataOffsets_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT(VideoDecodeH265PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265PictureInfoEXT(VkVideoDecodeH265PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH265PictureInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH265PictureInfoEXT(StdVideoDecodeH265PictureInfo *pStdPictureInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &slicesDataOffsets_, const void *pNext_ = nullptr) : pNext(pNext_) , pStdPictureInfo(pStdPictureInfo_) , slicesCount(static_cast(slicesDataOffsets_.size())) , pSlicesDataOffsets(slicesDataOffsets_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH265PictureInfoEXT &operator=(VideoDecodeH265PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265PictureInfoEXT &operator=(VkVideoDecodeH265PictureInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &setPStdPictureInfo(StdVideoDecodeH265PictureInfo *pStdPictureInfo_) VULKAN_HPP_NOEXCEPT { pStdPictureInfo = pStdPictureInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &setSlicesCount(uint32_t slicesCount_) VULKAN_HPP_NOEXCEPT { slicesCount = slicesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &setPSlicesDataOffsets(const uint32_t *pSlicesDataOffsets_) VULKAN_HPP_NOEXCEPT { pSlicesDataOffsets = pSlicesDataOffsets_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &slicesDataOffsets_) VULKAN_HPP_NOEXCEPT { slicesCount = static_cast(slicesDataOffsets_.size()); pSlicesDataOffsets = 
slicesDataOffsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH265PictureInfoEXT const &) const = default; # else bool operator==(VideoDecodeH265PictureInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pStdPictureInfo == rhs.pStdPictureInfo) && (slicesCount == rhs.slicesCount) && (pSlicesDataOffsets == rhs.pSlicesDataOffsets); # endif } bool operator!=(VideoDecodeH265PictureInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT; const void *pNext = {}; StdVideoDecodeH265PictureInfo *pStdPictureInfo = {}; uint32_t slicesCount = {}; const uint32_t *pSlicesDataOffsets = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT) == sizeof(VkVideoDecodeH265PictureInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH265PictureInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH265PictureInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH265ProfileEXT { using NativeType = VkVideoDecodeH265ProfileEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT(StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stdProfileIdc(stdProfileIdc_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT(VideoDecodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265ProfileEXT(VkVideoDecodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH265ProfileEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH265ProfileEXT &operator=(VideoDecodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265ProfileEXT &operator=(VkVideoDecodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT &setStdProfileIdc(StdVideoH265ProfileIdc stdProfileIdc_) VULKAN_HPP_NOEXCEPT { stdProfileIdc = stdProfileIdc_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast(this); } explicit operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stdProfileIdc); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(VideoDecodeH265ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&stdProfileIdc, &rhs.stdProfileIdc, sizeof(StdVideoH265ProfileIdc)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(VideoDecodeH265ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&stdProfileIdc, &rhs.stdProfileIdc, sizeof(StdVideoH265ProfileIdc)) == 0); } bool operator!=(VideoDecodeH265ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileEXT; const void *pNext = {}; StdVideoH265ProfileIdc stdProfileIdc = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT) == sizeof(VkVideoDecodeH265ProfileEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH265ProfileEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH265ProfileEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH265SessionParametersAddInfoEXT { using NativeType = VkVideoDecodeH265SessionParametersAddInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(uint32_t vpsStdCount_ = {}, const StdVideoH265VideoParameterSet *pVpsStd_ = {}, uint32_t spsStdCount_ = {}, const StdVideoH265SequenceParameterSet *pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH265PictureParameterSet *pPpsStd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vpsStdCount(vpsStdCount_), pVpsStd(pVpsStd_), spsStdCount(spsStdCount_), pSpsStd(pSpsStd_), ppsStdCount(ppsStdCount_), pPpsStd(pPpsStd_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(VideoDecodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265SessionParametersAddInfoEXT(VkVideoDecodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH265SessionParametersAddInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH265SessionParametersAddInfoEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vpsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , vpsStdCount(static_cast(vpsStd_.size())) , pVpsStd(vpsStd_.data()) , spsStdCount(static_cast(spsStd_.size())) , pSpsStd(spsStd_.data()) , 
ppsStdCount(static_cast(ppsStd_.size())) , pPpsStd(ppsStd_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH265SessionParametersAddInfoEXT &operator=(VideoDecodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265SessionParametersAddInfoEXT &operator=(VkVideoDecodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setVpsStdCount(uint32_t vpsStdCount_) VULKAN_HPP_NOEXCEPT { vpsStdCount = vpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setPVpsStd(const StdVideoH265VideoParameterSet *pVpsStd_) VULKAN_HPP_NOEXCEPT { pVpsStd = pVpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH265SessionParametersAddInfoEXT & setVpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vpsStd_) VULKAN_HPP_NOEXCEPT { vpsStdCount = static_cast(vpsStd_.size()); pVpsStd = vpsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setSpsStdCount(uint32_t spsStdCount_) VULKAN_HPP_NOEXCEPT { spsStdCount = spsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setPSpsStd(const StdVideoH265SequenceParameterSet *pSpsStd_) VULKAN_HPP_NOEXCEPT { pSpsStd = pSpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH265SessionParametersAddInfoEXT & setSpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_) VULKAN_HPP_NOEXCEPT { spsStdCount = static_cast(spsStd_.size()); pSpsStd = spsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setPpsStdCount(uint32_t ppsStdCount_) VULKAN_HPP_NOEXCEPT { ppsStdCount = ppsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &setPPpsStd(const StdVideoH265PictureParameterSet *pPpsStd_) VULKAN_HPP_NOEXCEPT { pPpsStd = pPpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeH265SessionParametersAddInfoEXT & setPpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_) VULKAN_HPP_NOEXCEPT { ppsStdCount = static_cast(ppsStd_.size()); pPpsStd = ppsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH265SessionParametersAddInfoEXT const &) const = default; # else bool operator==(VideoDecodeH265SessionParametersAddInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == 
rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vpsStdCount == rhs.vpsStdCount) && (pVpsStd == rhs.pVpsStd) && (spsStdCount == rhs.spsStdCount) && (pSpsStd == rhs.pSpsStd) && (ppsStdCount == rhs.ppsStdCount) && (pPpsStd == rhs.pPpsStd); # endif } bool operator!=(VideoDecodeH265SessionParametersAddInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; const void *pNext = {}; uint32_t vpsStdCount = {}; const StdVideoH265VideoParameterSet *pVpsStd = {}; uint32_t spsStdCount = {}; const StdVideoH265SequenceParameterSet *pSpsStd = {}; uint32_t ppsStdCount = {}; const StdVideoH265PictureParameterSet *pPpsStd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT) == sizeof(VkVideoDecodeH265SessionParametersAddInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH265SessionParametersAddInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeH265SessionParametersCreateInfoEXT { using NativeType = VkVideoDecodeH265SessionParametersCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(uint32_t maxVpsStdCount_ = {}, uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT *pParametersAddInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxVpsStdCount(maxVpsStdCount_), maxSpsStdCount(maxSpsStdCount_), maxPpsStdCount(maxPpsStdCount_), pParametersAddInfo(pParametersAddInfo_) { } VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(VideoDecodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265SessionParametersCreateInfoEXT(VkVideoDecodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeH265SessionParametersCreateInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeH265SessionParametersCreateInfoEXT &operator=(VideoDecodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeH265SessionParametersCreateInfoEXT &operator=(VkVideoDecodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &setMaxVpsStdCount(uint32_t maxVpsStdCount_) VULKAN_HPP_NOEXCEPT { maxVpsStdCount = maxVpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &setMaxSpsStdCount(uint32_t maxSpsStdCount_) VULKAN_HPP_NOEXCEPT { maxSpsStdCount = maxSpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 
VideoDecodeH265SessionParametersCreateInfoEXT &setMaxPpsStdCount(uint32_t maxPpsStdCount_) VULKAN_HPP_NOEXCEPT { maxPpsStdCount = maxPpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT *pParametersAddInfo_) VULKAN_HPP_NOEXCEPT { pParametersAddInfo = pParametersAddInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeH265SessionParametersCreateInfoEXT const &) const = default; # else bool operator==(VideoDecodeH265SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxVpsStdCount == rhs.maxVpsStdCount) && (maxSpsStdCount == rhs.maxSpsStdCount) && (maxPpsStdCount == rhs.maxPpsStdCount) && (pParametersAddInfo == rhs.pParametersAddInfo); # endif } bool operator!=(VideoDecodeH265SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; const void *pNext = {}; uint32_t maxVpsStdCount = {}; uint32_t maxSpsStdCount = {}; uint32_t maxPpsStdCount = {}; const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT *pParametersAddInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT) == sizeof(VkVideoDecodeH265SessionParametersCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeH265SessionParametersCreateInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoDecodeInfoKHR { using NativeType = VkVideoDecodeInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), srcBuffer(srcBuffer_), 
srcBufferOffset(srcBufferOffset_), srcBufferRange(srcBufferRange_), dstPictureResource(dstPictureResource_), pSetupReferenceSlot(pSetupReferenceSlot_), referenceSlotCount(referenceSlotCount_), pReferenceSlots(pReferenceSlots_) { } VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR(VideoDecodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeInfoKHR(VkVideoDecodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceSlots_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , srcBuffer(srcBuffer_) , srcBufferOffset(srcBufferOffset_) , srcBufferRange(srcBufferRange_) , dstPictureResource(dstPictureResource_) , pSetupReferenceSlot(pSetupReferenceSlot_) , referenceSlotCount(static_cast(referenceSlots_.size())) , pReferenceSlots(referenceSlots_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoDecodeInfoKHR &operator=(VideoDecodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoDecodeInfoKHR &operator=(VkVideoDecodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setSrcBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_) VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setSrcBufferOffset(VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_) VULKAN_HPP_NOEXCEPT { srcBufferOffset = srcBufferOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setSrcBufferRange(VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_) VULKAN_HPP_NOEXCEPT { srcBufferRange = srcBufferRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setDstPictureResource(VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const &dstPictureResource_) VULKAN_HPP_NOEXCEPT { dstPictureResource = dstPictureResource_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPSetupReferenceSlot(const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot_) VULKAN_HPP_NOEXCEPT { pSetupReferenceSlot = pSetupReferenceSlot_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setReferenceSlotCount(uint32_t referenceSlotCount_) VULKAN_HPP_NOEXCEPT { referenceSlotCount = referenceSlotCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &setPReferenceSlots(const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots_) VULKAN_HPP_NOEXCEPT { pReferenceSlots = pReferenceSlots_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoDecodeInfoKHR &setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceSlots_) VULKAN_HPP_NOEXCEPT { referenceSlotCount = static_cast(referenceSlots_.size()); pReferenceSlots = referenceSlots_.data(); 
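      // Both the element count and the pointer were taken from the same ArrayProxyNoTemporaries above,
      // so referenceSlotCount and pReferenceSlots cannot get out of sync.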
return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoDecodeInfoKHR const &) const = default; # else bool operator==(VideoDecodeInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (srcBuffer == rhs.srcBuffer) && (srcBufferOffset == rhs.srcBufferOffset) && (srcBufferRange == rhs.srcBufferRange) && (dstPictureResource == rhs.dstPictureResource) && (pSetupReferenceSlot == rhs.pSetupReferenceSlot) && (referenceSlotCount == rhs.referenceSlotCount) && (pReferenceSlots == rhs.pReferenceSlots); # endif } bool operator!=(VideoDecodeInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR dstPictureResource = {}; const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot = {}; uint32_t referenceSlotCount = {}; const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR) == sizeof(VkVideoDecodeInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoDecodeInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoDecodeInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeCapabilitiesKHR { using NativeType = VkVideoEncodeCapabilitiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR(VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, uint8_t rateControlLayerCount_ = {}, uint8_t qualityLevelCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), rateControlModes(rateControlModes_), rateControlLayerCount(rateControlLayerCount_), qualityLevelCount(qualityLevelCount_), inputImageDataFillAlignment(inputImageDataFillAlignment_) { } VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR(VideoEncodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; 
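    // The next constructor converts from the C struct by reinterpreting it as the C++ wrapper;
    // the VULKAN_HPP_STATIC_ASSERTs after the struct check that the layouts are identical.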
VideoEncodeCapabilitiesKHR(VkVideoEncodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeCapabilitiesKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeCapabilitiesKHR &operator=(VideoEncodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeCapabilitiesKHR &operator=(VkVideoEncodeCapabilitiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, rateControlModes, rateControlLayerCount, qualityLevelCount, inputImageDataFillAlignment); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeCapabilitiesKHR const &) const = default; # else bool operator==(VideoEncodeCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (rateControlModes == rhs.rateControlModes) && (rateControlLayerCount == rhs.rateControlLayerCount) && (qualityLevelCount == rhs.qualityLevelCount) && (inputImageDataFillAlignment == rhs.inputImageDataFillAlignment); # endif } bool operator!=(VideoEncodeCapabilitiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {}; uint8_t rateControlLayerCount = {}; uint8_t qualityLevelCount = {}; VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR) == sizeof(VkVideoEncodeCapabilitiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeCapabilitiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264CapabilitiesEXT { using NativeType = VkVideoEncodeH264CapabilitiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {}, uint8_t maxPPictureL0ReferenceCount_ = {}, uint8_t maxBPictureL0ReferenceCount_ = {}, uint8_t maxL1ReferenceCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag_ = {}, uint32_t maxBytesPerPicDenom_ = {}, uint32_t maxBitsPerMbDenom_ = {}, uint32_t log2MaxMvLengthHorizontal_ = {}, uint32_t log2MaxMvLengthVertical_ = {}, void *pNext_ = 
nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), inputModeFlags(inputModeFlags_), outputModeFlags(outputModeFlags_), maxPPictureL0ReferenceCount(maxPPictureL0ReferenceCount_), maxBPictureL0ReferenceCount(maxBPictureL0ReferenceCount_), maxL1ReferenceCount(maxL1ReferenceCount_), motionVectorsOverPicBoundariesFlag(motionVectorsOverPicBoundariesFlag_), maxBytesPerPicDenom(maxBytesPerPicDenom_), maxBitsPerMbDenom(maxBitsPerMbDenom_), log2MaxMvLengthHorizontal(log2MaxMvLengthHorizontal_), log2MaxMvLengthVertical(log2MaxMvLengthVertical_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT(VideoEncodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264CapabilitiesEXT(VkVideoEncodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264CapabilitiesEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264CapabilitiesEXT &operator=(VideoEncodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264CapabilitiesEXT &operator=(VkVideoEncodeH264CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, inputModeFlags, outputModeFlags, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, motionVectorsOverPicBoundariesFlag, maxBytesPerPicDenom, maxBitsPerMbDenom, log2MaxMvLengthHorizontal, log2MaxMvLengthVertical); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264CapabilitiesEXT const &) const = default; # else bool operator==(VideoEncodeH264CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (inputModeFlags == rhs.inputModeFlags) && (outputModeFlags == rhs.outputModeFlags) && (maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount) && (maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount) && (maxL1ReferenceCount == rhs.maxL1ReferenceCount) && (motionVectorsOverPicBoundariesFlag == rhs.motionVectorsOverPicBoundariesFlag) && (maxBytesPerPicDenom == rhs.maxBytesPerPicDenom) && (maxBitsPerMbDenom == rhs.maxBitsPerMbDenom) && (log2MaxMvLengthHorizontal == rhs.log2MaxMvLengthHorizontal) && (log2MaxMvLengthVertical == rhs.log2MaxMvLengthVertical); # endif } bool operator!=(VideoEncodeH264CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT; void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {}; uint8_t maxPPictureL0ReferenceCount = {}; uint8_t maxBPictureL0ReferenceCount = {}; uint8_t maxL1ReferenceCount = {}; VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag = {}; uint32_t maxBytesPerPicDenom = {}; uint32_t maxBitsPerMbDenom = {}; uint32_t log2MaxMvLengthHorizontal = {}; 
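    // Presumably mirrors the H.264 log2_max_mv_length_vertical bitstream restriction, i.e. the log2 of the
    // largest vertical motion-vector range supported (an assumption based on the field name, not spec text).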
uint32_t                                                log2MaxMvLengthVertical = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value,
                            "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesEXT>
  {
    using Type = VideoEncodeH264CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264DpbSlotInfoEXT
  {
    using NativeType = VkVideoEncodeH264DpbSlotInfoEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( int8_t                                  slotIndex_         = {},
                                                        const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {},
                                                        const void *                            pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , slotIndex( slotIndex_ )
      , pStdReferenceInfo( pStdReferenceInfo_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH264DpbSlotInfoEXT const *>( &rhs ) )
    {
    }
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264DpbSlotInfoEXT & operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT &
      setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoEXT *>( this );
    }

    explicit operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH264ReferenceInfo * const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, slotIndex, pStdReferenceInfo );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
#    endif
    }

    bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
    const void *                        pNext     = {};
    int8_t                              slotIndex = {};
    const StdVideoEncodeH264ReferenceInfo
*pStdReferenceInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT) == sizeof(VkVideoEncodeH264DpbSlotInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264DpbSlotInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264EmitPictureParametersEXT { using NativeType = VkVideoEncodeH264EmitPictureParametersEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264EmitPictureParametersEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT(uint8_t spsId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, uint32_t ppsIdEntryCount_ = {}, const uint8_t *ppsIdEntries_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), spsId(spsId_), emitSpsEnable(emitSpsEnable_), ppsIdEntryCount(ppsIdEntryCount_), ppsIdEntries(ppsIdEntries_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT(VideoEncodeH264EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264EmitPictureParametersEXT(VkVideoEncodeH264EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264EmitPictureParametersEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264EmitPictureParametersEXT(uint8_t spsId_, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &psIdEntries_, const void *pNext_ = nullptr) : pNext(pNext_) , spsId(spsId_) , emitSpsEnable(emitSpsEnable_) , ppsIdEntryCount(static_cast(psIdEntries_.size())) , ppsIdEntries(psIdEntries_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264EmitPictureParametersEXT &operator=(VideoEncodeH264EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264EmitPictureParametersEXT &operator=(VkVideoEncodeH264EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &setSpsId(uint8_t spsId_) VULKAN_HPP_NOEXCEPT { spsId = spsId_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &setEmitSpsEnable(VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_) VULKAN_HPP_NOEXCEPT { emitSpsEnable = emitSpsEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &setPpsIdEntryCount(uint32_t ppsIdEntryCount_) VULKAN_HPP_NOEXCEPT { ppsIdEntryCount = ppsIdEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &setPpsIdEntries(const uint8_t *ppsIdEntries_) VULKAN_HPP_NOEXCEPT { ppsIdEntries = ppsIdEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &psIdEntries_) VULKAN_HPP_NOEXCEPT { ppsIdEntryCount = 
static_cast(psIdEntries_.size()); ppsIdEntries = psIdEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, spsId, emitSpsEnable, ppsIdEntryCount, ppsIdEntries); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264EmitPictureParametersEXT const &) const = default; # else bool operator==(VideoEncodeH264EmitPictureParametersEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (spsId == rhs.spsId) && (emitSpsEnable == rhs.emitSpsEnable) && (ppsIdEntryCount == rhs.ppsIdEntryCount) && (ppsIdEntries == rhs.ppsIdEntries); # endif } bool operator!=(VideoEncodeH264EmitPictureParametersEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersEXT; const void *pNext = {}; uint8_t spsId = {}; VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; uint32_t ppsIdEntryCount = {}; const uint8_t *ppsIdEntries = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT) == sizeof(VkVideoEncodeH264EmitPictureParametersEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264EmitPictureParametersEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264EmitPictureParametersEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264FrameSizeEXT { using NativeType = VkVideoEncodeH264FrameSizeEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT : frameISize(frameISize_), framePSize(framePSize_), frameBSize(frameBSize_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT(VideoEncodeH264FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264FrameSizeEXT(VkVideoEncodeH264FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264FrameSizeEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264FrameSizeEXT &operator=(VideoEncodeH264FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264FrameSizeEXT &operator=(VkVideoEncodeH264FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT &setFrameISize(uint32_t frameISize_) VULKAN_HPP_NOEXCEPT { frameISize = frameISize_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT &setFramePSize(uint32_t framePSize_) VULKAN_HPP_NOEXCEPT { framePSize = framePSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT 
&setFrameBSize(uint32_t frameBSize_) VULKAN_HPP_NOEXCEPT { frameBSize = frameBSize_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(frameISize, framePSize, frameBSize); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264FrameSizeEXT const &) const = default; # else bool operator==(VideoEncodeH264FrameSizeEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (frameISize == rhs.frameISize) && (framePSize == rhs.framePSize) && (frameBSize == rhs.frameBSize); # endif } bool operator!=(VideoEncodeH264FrameSizeEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: uint32_t frameISize = {}; uint32_t framePSize = {}; uint32_t frameBSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT) == sizeof(VkVideoEncodeH264FrameSizeEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!"); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264ReferenceListsEXT { using NativeType = VkVideoEncodeH264ReferenceListsEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ReferenceListsEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsEXT(uint8_t referenceList0EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT *pReferenceList0Entries_ = {}, uint8_t referenceList1EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT *pReferenceList1Entries_ = {}, const StdVideoEncodeH264RefMemMgmtCtrlOperations *pMemMgmtCtrlOperations_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), referenceList0EntryCount(referenceList0EntryCount_), pReferenceList0Entries(pReferenceList0Entries_), referenceList1EntryCount(referenceList1EntryCount_), pReferenceList1Entries(pReferenceList1Entries_), pMemMgmtCtrlOperations(pMemMgmtCtrlOperations_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsEXT(VideoEncodeH264ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264ReferenceListsEXT(VkVideoEncodeH264ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264ReferenceListsEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264ReferenceListsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList0Entries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList1Entries_ = {}, const StdVideoEncodeH264RefMemMgmtCtrlOperations *pMemMgmtCtrlOperations_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , referenceList0EntryCount(static_cast(referenceList0Entries_.size())) , pReferenceList0Entries(referenceList0Entries_.data()) , referenceList1EntryCount(static_cast(referenceList1Entries_.size())) 
, pReferenceList1Entries(referenceList1Entries_.data()) , pMemMgmtCtrlOperations(pMemMgmtCtrlOperations_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264ReferenceListsEXT &operator=(VideoEncodeH264ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264ReferenceListsEXT &operator=(VkVideoEncodeH264ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT &setReferenceList0EntryCount(uint8_t referenceList0EntryCount_) VULKAN_HPP_NOEXCEPT { referenceList0EntryCount = referenceList0EntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPReferenceList0Entries(const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT *pReferenceList0Entries_) VULKAN_HPP_NOEXCEPT { pReferenceList0Entries = pReferenceList0Entries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264ReferenceListsEXT &setReferenceList0Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList0Entries_) VULKAN_HPP_NOEXCEPT { referenceList0EntryCount = static_cast(referenceList0Entries_.size()); pReferenceList0Entries = referenceList0Entries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT &setReferenceList1EntryCount(uint8_t referenceList1EntryCount_) VULKAN_HPP_NOEXCEPT { referenceList1EntryCount = referenceList1EntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPReferenceList1Entries(const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT *pReferenceList1Entries_) VULKAN_HPP_NOEXCEPT { pReferenceList1Entries = pReferenceList1Entries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264ReferenceListsEXT &setReferenceList1Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList1Entries_) VULKAN_HPP_NOEXCEPT { referenceList1EntryCount = static_cast(referenceList1Entries_.size()); pReferenceList1Entries = referenceList1Entries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsEXT & setPMemMgmtCtrlOperations(const StdVideoEncodeH264RefMemMgmtCtrlOperations *pMemMgmtCtrlOperations_) VULKAN_HPP_NOEXCEPT { pMemMgmtCtrlOperations = pMemMgmtCtrlOperations_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264ReferenceListsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264ReferenceListsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pMemMgmtCtrlOperations); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264ReferenceListsEXT const &) const = default; # else bool operator==(VideoEncodeH264ReferenceListsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == 
rhs.sType) && (pNext == rhs.pNext) && (referenceList0EntryCount == rhs.referenceList0EntryCount) && (pReferenceList0Entries == rhs.pReferenceList0Entries) && (referenceList1EntryCount == rhs.referenceList1EntryCount) && (pReferenceList1Entries == rhs.pReferenceList1Entries) && (pMemMgmtCtrlOperations == rhs.pMemMgmtCtrlOperations); # endif } bool operator!=(VideoEncodeH264ReferenceListsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ReferenceListsEXT; const void *pNext = {}; uint8_t referenceList0EntryCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT *pReferenceList0Entries = {}; uint8_t referenceList1EntryCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT *pReferenceList1Entries = {}; const StdVideoEncodeH264RefMemMgmtCtrlOperations *pMemMgmtCtrlOperations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT) == sizeof(VkVideoEncodeH264ReferenceListsEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264ReferenceListsEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264ReferenceListsEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264NaluSliceEXT { using NativeType = VkVideoEncodeH264NaluSliceEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT(uint32_t mbCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists_ = {}, const StdVideoEncodeH264SliceHeader *pSliceHeaderStd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), mbCount(mbCount_), pReferenceFinalLists(pReferenceFinalLists_), pSliceHeaderStd(pSliceHeaderStd_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT(VideoEncodeH264NaluSliceEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264NaluSliceEXT(VkVideoEncodeH264NaluSliceEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264NaluSliceEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264NaluSliceEXT &operator=(VideoEncodeH264NaluSliceEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264NaluSliceEXT &operator=(VkVideoEncodeH264NaluSliceEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &setMbCount(uint32_t mbCount_) VULKAN_HPP_NOEXCEPT { mbCount = mbCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT & setPReferenceFinalLists(const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists_) VULKAN_HPP_NOEXCEPT { pReferenceFinalLists = pReferenceFinalLists_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &setPSliceHeaderStd(const StdVideoEncodeH264SliceHeader *pSliceHeaderStd_) VULKAN_HPP_NOEXCEPT { pSliceHeaderStd = pSliceHeaderStd_; return *this; } # endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, mbCount, pReferenceFinalLists, pSliceHeaderStd); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264NaluSliceEXT const &) const = default; # else bool operator==(VideoEncodeH264NaluSliceEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (mbCount == rhs.mbCount) && (pReferenceFinalLists == rhs.pReferenceFinalLists) && (pSliceHeaderStd == rhs.pSliceHeaderStd); # endif } bool operator!=(VideoEncodeH264NaluSliceEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceEXT; const void *pNext = {}; uint32_t mbCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists = {}; const StdVideoEncodeH264SliceHeader *pSliceHeaderStd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT) == sizeof(VkVideoEncodeH264NaluSliceEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264NaluSliceEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264NaluSliceEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264ProfileEXT { using NativeType = VkVideoEncodeH264ProfileEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT(StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stdProfileIdc(stdProfileIdc_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT(VideoEncodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264ProfileEXT(VkVideoEncodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264ProfileEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264ProfileEXT &operator=(VideoEncodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264ProfileEXT &operator=(VkVideoEncodeH264ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT &setStdProfileIdc(StdVideoH264ProfileIdc stdProfileIdc_) VULKAN_HPP_NOEXCEPT { stdProfileIdc = stdProfileIdc_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264ProfileEXT &() 
VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264ProfileEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, stdProfileIdc );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;

      return std::strong_ordering::equivalent;
    }
#  endif

    bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eVideoEncodeH264ProfileEXT;
    const void *                        pNext         = {};
    StdVideoH264ProfileIdc              stdProfileIdc = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT ) == sizeof( VkVideoEncodeH264ProfileEXT ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT>::value,
                            "VideoEncodeH264ProfileEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileEXT>
  {
    using Type = VideoEncodeH264ProfileEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264QpEXT
  {
    using NativeType = VkVideoEncodeH264QpEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
      : qpI( qpI_ )
      , qpP( qpP_ )
      , qpB( qpB_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264QpEXT( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264QpEXT( *reinterpret_cast<VideoEncodeH264QpEXT const *>( &rhs ) )
    {
    }
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH264QpEXT & operator=( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264QpEXT & operator=( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
    {
      qpI = qpI_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
    {
      qpP = qpP_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
    {
      qpB = qpB_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264QpEXT *>( this );
    }

    explicit operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264QpEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<int32_t const &, int32_t const &, int32_t const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( qpI, qpP, qpB );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264QpEXT const & ) const = default;
#  else
    bool
operator==(VideoEncodeH264QpEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (qpI == rhs.qpI) && (qpP == rhs.qpP) && (qpB == rhs.qpB); # endif } bool operator!=(VideoEncodeH264QpEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: int32_t qpI = {}; int32_t qpP = {}; int32_t qpB = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT) == sizeof(VkVideoEncodeH264QpEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264QpEXT is not nothrow_move_constructible!"); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264RateControlInfoEXT { using NativeType = VkVideoEncodeH264RateControlInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT(uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_ = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown, uint8_t temporalLayerCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), gopFrameCount(gopFrameCount_), idrPeriod(idrPeriod_), consecutiveBFrameCount(consecutiveBFrameCount_), rateControlStructure(rateControlStructure_), temporalLayerCount(temporalLayerCount_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT(VideoEncodeH264RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264RateControlInfoEXT(VkVideoEncodeH264RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264RateControlInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264RateControlInfoEXT &operator=(VideoEncodeH264RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264RateControlInfoEXT &operator=(VkVideoEncodeH264RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &setGopFrameCount(uint32_t gopFrameCount_) VULKAN_HPP_NOEXCEPT { gopFrameCount = gopFrameCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &setIdrPeriod(uint32_t idrPeriod_) VULKAN_HPP_NOEXCEPT { idrPeriod = idrPeriod_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &setConsecutiveBFrameCount(uint32_t consecutiveBFrameCount_) VULKAN_HPP_NOEXCEPT { consecutiveBFrameCount = consecutiveBFrameCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setRateControlStructure(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure_) VULKAN_HPP_NOEXCEPT { rateControlStructure = rateControlStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT &setTemporalLayerCount(uint8_t temporalLayerCount_) VULKAN_HPP_NOEXCEPT { temporalLayerCount 
= temporalLayerCount_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, temporalLayerCount); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264RateControlInfoEXT const &) const = default; # else bool operator==(VideoEncodeH264RateControlInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (gopFrameCount == rhs.gopFrameCount) && (idrPeriod == rhs.idrPeriod) && (consecutiveBFrameCount == rhs.consecutiveBFrameCount) && (rateControlStructure == rhs.rateControlStructure) && (temporalLayerCount == rhs.temporalLayerCount); # endif } bool operator!=(VideoEncodeH264RateControlInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT; const void *pNext = {}; uint32_t gopFrameCount = {}; uint32_t idrPeriod = {}; uint32_t consecutiveBFrameCount = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureFlagBitsEXT::eUnknown; uint8_t temporalLayerCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT) == sizeof(VkVideoEncodeH264RateControlInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264RateControlInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264RateControlLayerInfoEXT { using NativeType = VkVideoEncodeH264RateControlLayerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT(uint8_t temporalLayerId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), temporalLayerId(temporalLayerId_), useInitialRcQp(useInitialRcQp_), initialRcQp(initialRcQp_), useMinQp(useMinQp_), minQp(minQp_), useMaxQp(useMaxQp_), maxQp(maxQp_), useMaxFrameSize(useMaxFrameSize_), maxFrameSize(maxFrameSize_) { } VULKAN_HPP_CONSTEXPR 
VideoEncodeH264RateControlLayerInfoEXT(VideoEncodeH264RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264RateControlLayerInfoEXT(VkVideoEncodeH264RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264RateControlLayerInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264RateControlLayerInfoEXT &operator=(VideoEncodeH264RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264RateControlLayerInfoEXT &operator=(VkVideoEncodeH264RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setTemporalLayerId(uint8_t temporalLayerId_) VULKAN_HPP_NOEXCEPT { temporalLayerId = temporalLayerId_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setUseInitialRcQp(VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_) VULKAN_HPP_NOEXCEPT { useInitialRcQp = useInitialRcQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setInitialRcQp(VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &initialRcQp_) VULKAN_HPP_NOEXCEPT { initialRcQp = initialRcQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setUseMinQp(VULKAN_HPP_NAMESPACE::Bool32 useMinQp_) VULKAN_HPP_NOEXCEPT { useMinQp = useMinQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setMinQp(VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &minQp_) VULKAN_HPP_NOEXCEPT { minQp = minQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setUseMaxQp(VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_) VULKAN_HPP_NOEXCEPT { useMaxQp = useMaxQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setMaxQp(VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const &maxQp_) VULKAN_HPP_NOEXCEPT { maxQp = maxQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT &setUseMaxFrameSize(VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_) VULKAN_HPP_NOEXCEPT { useMaxFrameSize = useMaxFrameSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxFrameSize(VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const &maxFrameSize_) VULKAN_HPP_NOEXCEPT { maxFrameSize = maxFrameSize_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, temporalLayerId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264RateControlLayerInfoEXT const &) const = default; # else bool operator==(VideoEncodeH264RateControlLayerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && 
(temporalLayerId == rhs.temporalLayerId) && (useInitialRcQp == rhs.useInitialRcQp) && (initialRcQp == rhs.initialRcQp) && (useMinQp == rhs.useMinQp) && (minQp == rhs.minQp) && (useMaxQp == rhs.useMaxQp) && (maxQp == rhs.maxQp) && (useMaxFrameSize == rhs.useMaxFrameSize) && (maxFrameSize == rhs.maxFrameSize); # endif } bool operator!=(VideoEncodeH264RateControlLayerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT; const void *pNext = {}; uint8_t temporalLayerId = {}; VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp = {}; VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp = {}; VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp = {}; VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT) == sizeof(VkVideoEncodeH264RateControlLayerInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264RateControlLayerInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264SessionParametersAddInfoEXT { using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(uint32_t spsStdCount_ = {}, const StdVideoH264SequenceParameterSet *pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH264PictureParameterSet *pPpsStd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), spsStdCount(spsStdCount_), pSpsStd(pSpsStd_), ppsStdCount(ppsStdCount_), pPpsStd(pPpsStd_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(VideoEncodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264SessionParametersAddInfoEXT(VkVideoEncodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264SessionParametersAddInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264SessionParametersAddInfoEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , spsStdCount(static_cast(spsStd_.size())) , pSpsStd(spsStd_.data()) , ppsStdCount(static_cast(ppsStd_.size())) , pPpsStd(ppsStd_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264SessionParametersAddInfoEXT &operator=(VideoEncodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264SessionParametersAddInfoEXT &operator=(VkVideoEncodeH264SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = spsStdCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpsStd = pSpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264SessionParametersAddInfoEXT &
      setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
      pSpsStd     = spsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = ppsStdCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pPpsStd = pPpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264SessionParametersAddInfoEXT &
      setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
      pPpsStd     = ppsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
    }

    explicit operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               uint32_t const &,
               const StdVideoH264SequenceParameterSet * const &,
               uint32_t const &,
               const StdVideoH264PictureParameterSet * const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) &&
             ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
#    endif
    }

    bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
    const void *                             pNext       = {};
    uint32_t                                 spsStdCount = {};
    const StdVideoH264SequenceParameterSet * pSpsStd     = {};
    uint32_t                                 ppsStdCount = {};
    const StdVideoH264PictureParameterSet *  pPpsStd     = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
                            "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoEXT>
  {
    using Type = VideoEncodeH264SessionParametersAddInfoEXT;
  };
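
  // Editorial note (illustrative sketch, not part of the generated registry output): the enhanced-mode
  // constructor and the setSpsStd()/setPpsStd() overloads above take an ArrayProxyNoTemporaries, so the
  // count/pointer pairs can be filled from existing containers in one step. The variables `sps` and `pps`
  // below are hypothetical placeholders for parameter sets populated elsewhere by the application.
  //
  //   std::array<StdVideoH264SequenceParameterSet, 1> sps = {};  // populated by the application
  //   std::array<StdVideoH264PictureParameterSet, 2>  pps = {};  // populated by the application
  //
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT addInfo( sps, pps );
  //   // ...or, equivalently, via the chained setters:
  //   //   addInfo.setSpsStd( sps ).setPpsStd( pps );
  //   // Both forms derive spsStdCount/pSpsStd and ppsStdCount/pPpsStd from the containers' size() and data().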
#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264SessionParametersCreateInfoEXT { using NativeType = VkVideoEncodeH264SessionParametersCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT *pParametersAddInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxSpsStdCount(maxSpsStdCount_), maxPpsStdCount(maxPpsStdCount_), pParametersAddInfo(pParametersAddInfo_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(VideoEncodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264SessionParametersCreateInfoEXT(VkVideoEncodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264SessionParametersCreateInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264SessionParametersCreateInfoEXT &operator=(VideoEncodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264SessionParametersCreateInfoEXT &operator=(VkVideoEncodeH264SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &setMaxSpsStdCount(uint32_t maxSpsStdCount_) VULKAN_HPP_NOEXCEPT { maxSpsStdCount = maxSpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &setMaxPpsStdCount(uint32_t maxPpsStdCount_) VULKAN_HPP_NOEXCEPT { maxPpsStdCount = maxPpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo(const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT *pParametersAddInfo_) VULKAN_HPP_NOEXCEPT { pParametersAddInfo = pParametersAddInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264SessionParametersCreateInfoEXT const &) const = default; # else bool operator==(VideoEncodeH264SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxSpsStdCount == rhs.maxSpsStdCount) && (maxPpsStdCount == rhs.maxPpsStdCount) && (pParametersAddInfo == rhs.pParametersAddInfo); # endif } bool operator!=(VideoEncodeH264SessionParametersCreateInfoEXT const &rhs) const 
VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; const void *pNext = {}; uint32_t maxSpsStdCount = {}; uint32_t maxPpsStdCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT *pParametersAddInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT) == sizeof(VkVideoEncodeH264SessionParametersCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264SessionParametersCreateInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH264VclFrameInfoEXT { using NativeType = VkVideoEncodeH264VclFrameInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists_ = {}, uint32_t naluSliceEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT *pNaluSliceEntries_ = {}, const StdVideoEncodeH264PictureInfo *pCurrentPictureInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), pReferenceFinalLists(pReferenceFinalLists_), naluSliceEntryCount(naluSliceEntryCount_), pNaluSliceEntries(pNaluSliceEntries_), pCurrentPictureInfo(pCurrentPictureInfo_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(VideoEncodeH264VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264VclFrameInfoEXT(VkVideoEncodeH264VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH264VclFrameInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &naluSliceEntries_, const StdVideoEncodeH264PictureInfo *pCurrentPictureInfo_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , pReferenceFinalLists(pReferenceFinalLists_) , naluSliceEntryCount(static_cast(naluSliceEntries_.size())) , pNaluSliceEntries(naluSliceEntries_.data()) , pCurrentPictureInfo(pCurrentPictureInfo_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH264VclFrameInfoEXT &operator=(VideoEncodeH264VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH264VclFrameInfoEXT &operator=(VkVideoEncodeH264VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPReferenceFinalLists(const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists_) VULKAN_HPP_NOEXCEPT { pReferenceFinalLists = pReferenceFinalLists_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT 
&setNaluSliceEntryCount(uint32_t naluSliceEntryCount_) VULKAN_HPP_NOEXCEPT { naluSliceEntryCount = naluSliceEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries(const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT *pNaluSliceEntries_) VULKAN_HPP_NOEXCEPT { pNaluSliceEntries = pNaluSliceEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH264VclFrameInfoEXT &setNaluSliceEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &naluSliceEntries_) VULKAN_HPP_NOEXCEPT { naluSliceEntryCount = static_cast(naluSliceEntries_.size()); pNaluSliceEntries = naluSliceEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo(const StdVideoEncodeH264PictureInfo *pCurrentPictureInfo_) VULKAN_HPP_NOEXCEPT { pCurrentPictureInfo = pCurrentPictureInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pReferenceFinalLists, naluSliceEntryCount, pNaluSliceEntries, pCurrentPictureInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH264VclFrameInfoEXT const &) const = default; # else bool operator==(VideoEncodeH264VclFrameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pReferenceFinalLists == rhs.pReferenceFinalLists) && (naluSliceEntryCount == rhs.naluSliceEntryCount) && (pNaluSliceEntries == rhs.pNaluSliceEntries) && (pCurrentPictureInfo == rhs.pCurrentPictureInfo); # endif } bool operator!=(VideoEncodeH264VclFrameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264VclFrameInfoEXT; const void *pNext = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsEXT *pReferenceFinalLists = {}; uint32_t naluSliceEntryCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT *pNaluSliceEntries = {}; const StdVideoEncodeH264PictureInfo *pCurrentPictureInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT) == sizeof(VkVideoEncodeH264VclFrameInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH264VclFrameInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265CapabilitiesEXT { using NativeType = VkVideoEncodeH265CapabilitiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT(VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT 
flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes_ = {}, uint8_t maxPPictureL0ReferenceCount_ = {}, uint8_t maxBPictureL0ReferenceCount_ = {}, uint8_t maxL1ReferenceCount_ = {}, uint8_t maxSubLayersCount_ = {}, uint8_t minLog2MinLumaCodingBlockSizeMinus3_ = {}, uint8_t maxLog2MinLumaCodingBlockSizeMinus3_ = {}, uint8_t minLog2MinLumaTransformBlockSizeMinus2_ = {}, uint8_t maxLog2MinLumaTransformBlockSizeMinus2_ = {}, uint8_t minMaxTransformHierarchyDepthInter_ = {}, uint8_t maxMaxTransformHierarchyDepthInter_ = {}, uint8_t minMaxTransformHierarchyDepthIntra_ = {}, uint8_t maxMaxTransformHierarchyDepthIntra_ = {}, uint8_t maxDiffCuQpDeltaDepth_ = {}, uint8_t minMaxNumMergeCand_ = {}, uint8_t maxMaxNumMergeCand_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), inputModeFlags(inputModeFlags_), outputModeFlags(outputModeFlags_), ctbSizes(ctbSizes_), transformBlockSizes(transformBlockSizes_), maxPPictureL0ReferenceCount(maxPPictureL0ReferenceCount_), maxBPictureL0ReferenceCount(maxBPictureL0ReferenceCount_), maxL1ReferenceCount(maxL1ReferenceCount_), maxSubLayersCount(maxSubLayersCount_), minLog2MinLumaCodingBlockSizeMinus3(minLog2MinLumaCodingBlockSizeMinus3_), maxLog2MinLumaCodingBlockSizeMinus3(maxLog2MinLumaCodingBlockSizeMinus3_), minLog2MinLumaTransformBlockSizeMinus2(minLog2MinLumaTransformBlockSizeMinus2_), maxLog2MinLumaTransformBlockSizeMinus2(maxLog2MinLumaTransformBlockSizeMinus2_), minMaxTransformHierarchyDepthInter(minMaxTransformHierarchyDepthInter_), maxMaxTransformHierarchyDepthInter(maxMaxTransformHierarchyDepthInter_), minMaxTransformHierarchyDepthIntra(minMaxTransformHierarchyDepthIntra_), maxMaxTransformHierarchyDepthIntra(maxMaxTransformHierarchyDepthIntra_), maxDiffCuQpDeltaDepth(maxDiffCuQpDeltaDepth_), minMaxNumMergeCand(minMaxNumMergeCand_), maxMaxNumMergeCand(maxMaxNumMergeCand_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT(VideoEncodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265CapabilitiesEXT(VkVideoEncodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265CapabilitiesEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265CapabilitiesEXT &operator=(VideoEncodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265CapabilitiesEXT &operator=(VkVideoEncodeH265CapabilitiesEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } explicit operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, inputModeFlags, outputModeFlags, ctbSizes, transformBlockSizes, maxPPictureL0ReferenceCount, maxBPictureL0ReferenceCount, maxL1ReferenceCount, maxSubLayersCount, minLog2MinLumaCodingBlockSizeMinus3, maxLog2MinLumaCodingBlockSizeMinus3, minLog2MinLumaTransformBlockSizeMinus2, maxLog2MinLumaTransformBlockSizeMinus2, minMaxTransformHierarchyDepthInter, maxMaxTransformHierarchyDepthInter, 
                      minMaxTransformHierarchyDepthIntra,
                      maxMaxTransformHierarchyDepthIntra,
                      maxDiffCuQpDeltaDepth,
                      minMaxNumMergeCand,
                      maxMaxNumMergeCand);
    }
# endif

# if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(VideoEncodeH265CapabilitiesEXT const &) const = default;
# else
    bool operator==(VideoEncodeH265CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (inputModeFlags == rhs.inputModeFlags) &&
             (outputModeFlags == rhs.outputModeFlags) && (ctbSizes == rhs.ctbSizes) && (transformBlockSizes == rhs.transformBlockSizes) &&
             (maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount) && (maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount) &&
             (maxL1ReferenceCount == rhs.maxL1ReferenceCount) && (maxSubLayersCount == rhs.maxSubLayersCount) &&
             (minLog2MinLumaCodingBlockSizeMinus3 == rhs.minLog2MinLumaCodingBlockSizeMinus3) &&
             (maxLog2MinLumaCodingBlockSizeMinus3 == rhs.maxLog2MinLumaCodingBlockSizeMinus3) &&
             (minLog2MinLumaTransformBlockSizeMinus2 == rhs.minLog2MinLumaTransformBlockSizeMinus2) &&
             (maxLog2MinLumaTransformBlockSizeMinus2 == rhs.maxLog2MinLumaTransformBlockSizeMinus2) &&
             (minMaxTransformHierarchyDepthInter == rhs.minMaxTransformHierarchyDepthInter) &&
             (maxMaxTransformHierarchyDepthInter == rhs.maxMaxTransformHierarchyDepthInter) &&
             (minMaxTransformHierarchyDepthIntra == rhs.minMaxTransformHierarchyDepthIntra) &&
             (maxMaxTransformHierarchyDepthIntra == rhs.maxMaxTransformHierarchyDepthIntra) && (maxDiffCuQpDeltaDepth == rhs.maxDiffCuQpDeltaDepth) &&
             (minMaxNumMergeCand == rhs.minMaxNumMergeCand) && (maxMaxNumMergeCand == rhs.maxMaxNumMergeCand);
# endif
    }

    bool operator!=(VideoEncodeH265CapabilitiesEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
# endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT;
    void *pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes = {};
    uint8_t maxPPictureL0ReferenceCount = {};
    uint8_t maxBPictureL0ReferenceCount = {};
    uint8_t maxL1ReferenceCount = {};
    uint8_t maxSubLayersCount = {};
    uint8_t minLog2MinLumaCodingBlockSizeMinus3 = {};
    uint8_t maxLog2MinLumaCodingBlockSizeMinus3 = {};
    uint8_t minLog2MinLumaTransformBlockSizeMinus2 = {};
    uint8_t maxLog2MinLumaTransformBlockSizeMinus2 = {};
    uint8_t minMaxTransformHierarchyDepthInter = {};
    uint8_t maxMaxTransformHierarchyDepthInter = {};
    uint8_t minMaxTransformHierarchyDepthIntra = {};
    uint8_t maxMaxTransformHierarchyDepthIntra = {};
    uint8_t maxDiffCuQpDeltaDepth = {};
    uint8_t minMaxNumMergeCand = {};
    uint8_t maxMaxNumMergeCand = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT) == sizeof(VkVideoEncodeH265CapabilitiesEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value,
                           "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesEXT>
  {
    using Type = VideoEncodeH265CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
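  //=== Editorial note: usage sketch (not part of the generated API) ===
  // VideoEncodeH265CapabilitiesEXT is an output structure: the implementation fills it in when it is
  // chained into the pNext chain of the codec-independent vk::VideoCapabilitiesKHR query provided by
  // VK_KHR_video_queue (vkGetPhysicalDeviceVideoCapabilitiesKHR). A minimal sketch, assuming the
  // default "vk" namespace alias and an already constructed vk::VideoProfileKHR describing the H.265
  // encode profile being queried:
  //
  //   vk::VideoEncodeH265CapabilitiesEXT h265Caps{};   // sType is preset by the wrapper
  //   vk::VideoCapabilitiesKHR           caps{};
  //   caps.pNext = &h265Caps;                          // extend the query via the pNext chain
  //   // pass the profile and &caps to vkGetPhysicalDeviceVideoCapabilitiesKHR (or the wrapper on
  //   // vk::PhysicalDevice); afterwards h265Caps.ctbSizes, h265Caps.maxSubLayersCount, ... hold the
  //   // implementation limits.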
#if defined(VK_ENABLE_BETA_EXTENSIONS)
  struct VideoEncodeH265DpbSlotInfoEXT
  {
    using NativeType = VkVideoEncodeH265DpbSlotInfoEXT;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;

# if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT(int8_t slotIndex_ = {},
                                                       const StdVideoEncodeH265ReferenceInfo *pStdReferenceInfo_ = {},
                                                       const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT
      : pNext(pNext_), slotIndex(slotIndex_), pStdReferenceInfo(pStdReferenceInfo_)
    {
    }

    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT(VideoEncodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265DpbSlotInfoEXT(VkVideoEncodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH265DpbSlotInfoEXT(*reinterpret_cast<VideoEncodeH265DpbSlotInfoEXT const *>(&rhs))
    {
    }
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEncodeH265DpbSlotInfoEXT &operator=(VideoEncodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH265DpbSlotInfoEXT &operator=(VkVideoEncodeH265DpbSlotInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const *>(&rhs);
      return *this;
    }

# if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT &setSlotIndex(int8_t slotIndex_) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT &
      setPStdReferenceInfo(const StdVideoEncodeH265ReferenceInfo *pStdReferenceInfo_) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoEXT *>(this);
    }

    explicit operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoEXT *>(this);
    }

# if defined(VULKAN_HPP_USE_REFLECT)
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int8_t const &, const StdVideoEncodeH265ReferenceInfo * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie(sType, pNext, slotIndex, pStdReferenceInfo);
    }
# endif

# if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>(VideoEncodeH265DpbSlotInfoEXT const &) const = default;
# else
    bool operator==(VideoEncodeH265DpbSlotInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
# if defined(VULKAN_HPP_USE_REFLECT)
      return this->reflect() == rhs.reflect();
# else
      return (sType == rhs.sType) && (pNext == rhs.pNext) && (slotIndex == rhs.slotIndex) && (pStdReferenceInfo == rhs.pStdReferenceInfo);
# endif
    }

    bool operator!=(VideoEncodeH265DpbSlotInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==(rhs);
    }
# endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoEXT;
    const void *pNext = {};
    int8_t slotIndex = {};
    const StdVideoEncodeH265ReferenceInfo *pStdReferenceInfo = {};
  };
  VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT) == sizeof(VkVideoEncodeH265DpbSlotInfoEXT),
                           "struct and wrapper have different size!");
  VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value,
                           "struct wrapper is not a standard layout!");
  VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value,
                           "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!");

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoEXT>
  {
    using Type = VideoEncodeH265DpbSlotInfoEXT;
  };
#endif
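  //=== Editorial note: usage sketch (not part of the generated API) ===
  // A minimal sketch of filling H.265 DPB slot entries and handing them to the reference-list
  // structure defined further below, assuming the default "vk" namespace alias and the codec std
  // type StdVideoEncodeH265ReferenceInfo from the vk_video headers; the ArrayProxy overload used on
  // the last line is only available when enhanced mode is not disabled:
  //
  //   StdVideoEncodeH265ReferenceInfo stdRef = {};     // codec-standard reference picture info
  //   auto slot0 = vk::VideoEncodeH265DpbSlotInfoEXT{}
  //                  .setSlotIndex(0)                  // DPB slot that backs this reference picture
  //                  .setPStdReferenceInfo(&stdRef);
  //   std::vector<vk::VideoEncodeH265DpbSlotInfoEXT> list0 = { slot0 };
  //   vk::VideoEncodeH265ReferenceListsEXT refLists(list0);   // sets referenceList0EntryCount and pointer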
/*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265EmitPictureParametersEXT { using NativeType = VkVideoEncodeH265EmitPictureParametersEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265EmitPictureParametersEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersEXT(uint8_t vpsId_ = {}, uint8_t spsId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, uint32_t ppsIdEntryCount_ = {}, const uint8_t *ppsIdEntries_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vpsId(vpsId_), spsId(spsId_), emitVpsEnable(emitVpsEnable_), emitSpsEnable(emitSpsEnable_), ppsIdEntryCount(ppsIdEntryCount_), ppsIdEntries(ppsIdEntries_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersEXT(VideoEncodeH265EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265EmitPictureParametersEXT(VkVideoEncodeH265EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265EmitPictureParametersEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265EmitPictureParametersEXT(uint8_t vpsId_, uint8_t spsId_, VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_, VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &psIdEntries_, const void *pNext_ = nullptr) : pNext(pNext_) , vpsId(vpsId_) , spsId(spsId_) , emitVpsEnable(emitVpsEnable_) , emitSpsEnable(emitSpsEnable_) , ppsIdEntryCount(static_cast(psIdEntries_.size())) , ppsIdEntries(psIdEntries_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265EmitPictureParametersEXT &operator=(VideoEncodeH265EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265EmitPictureParametersEXT &operator=(VkVideoEncodeH265EmitPictureParametersEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setVpsId(uint8_t vpsId_) VULKAN_HPP_NOEXCEPT { vpsId = vpsId_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setSpsId(uint8_t spsId_) VULKAN_HPP_NOEXCEPT { spsId = spsId_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setEmitVpsEnable(VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_) VULKAN_HPP_NOEXCEPT { emitVpsEnable = emitVpsEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setEmitSpsEnable(VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_) VULKAN_HPP_NOEXCEPT { emitSpsEnable = emitSpsEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setPpsIdEntryCount(uint32_t ppsIdEntryCount_) VULKAN_HPP_NOEXCEPT { ppsIdEntryCount = ppsIdEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersEXT &setPpsIdEntries(const uint8_t *ppsIdEntries_) VULKAN_HPP_NOEXCEPT { ppsIdEntries = ppsIdEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265EmitPictureParametersEXT & setPsIdEntries(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &psIdEntries_) 
VULKAN_HPP_NOEXCEPT { ppsIdEntryCount = static_cast(psIdEntries_.size()); ppsIdEntries = psIdEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vpsId, spsId, emitVpsEnable, emitSpsEnable, ppsIdEntryCount, ppsIdEntries); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265EmitPictureParametersEXT const &) const = default; # else bool operator==(VideoEncodeH265EmitPictureParametersEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vpsId == rhs.vpsId) && (spsId == rhs.spsId) && (emitVpsEnable == rhs.emitVpsEnable) && (emitSpsEnable == rhs.emitSpsEnable) && (ppsIdEntryCount == rhs.ppsIdEntryCount) && (ppsIdEntries == rhs.ppsIdEntries); # endif } bool operator!=(VideoEncodeH265EmitPictureParametersEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersEXT; const void *pNext = {}; uint8_t vpsId = {}; uint8_t spsId = {}; VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable = {}; VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; uint32_t ppsIdEntryCount = {}; const uint8_t *ppsIdEntries = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersEXT) == sizeof(VkVideoEncodeH265EmitPictureParametersEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265EmitPictureParametersEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265EmitPictureParametersEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265FrameSizeEXT { using NativeType = VkVideoEncodeH265FrameSizeEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT(uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {}) VULKAN_HPP_NOEXCEPT : frameISize(frameISize_), framePSize(framePSize_), frameBSize(frameBSize_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT(VideoEncodeH265FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265FrameSizeEXT(VkVideoEncodeH265FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265FrameSizeEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265FrameSizeEXT &operator=(VideoEncodeH265FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265FrameSizeEXT &operator=(VkVideoEncodeH265FrameSizeEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT &setFrameISize(uint32_t frameISize_) VULKAN_HPP_NOEXCEPT { frameISize = frameISize_; return *this; } VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH265FrameSizeEXT &setFramePSize(uint32_t framePSize_) VULKAN_HPP_NOEXCEPT { framePSize = framePSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT &setFrameBSize(uint32_t frameBSize_) VULKAN_HPP_NOEXCEPT { frameBSize = frameBSize_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(frameISize, framePSize, frameBSize); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265FrameSizeEXT const &) const = default; # else bool operator==(VideoEncodeH265FrameSizeEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (frameISize == rhs.frameISize) && (framePSize == rhs.framePSize) && (frameBSize == rhs.frameBSize); # endif } bool operator!=(VideoEncodeH265FrameSizeEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: uint32_t frameISize = {}; uint32_t framePSize = {}; uint32_t frameBSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT) == sizeof(VkVideoEncodeH265FrameSizeEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!"); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265ReferenceListsEXT { using NativeType = VkVideoEncodeH265ReferenceListsEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsEXT(uint8_t referenceList0EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT *pReferenceList0Entries_ = {}, uint8_t referenceList1EntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT *pReferenceList1Entries_ = {}, const StdVideoEncodeH265ReferenceModifications *pReferenceModifications_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), referenceList0EntryCount(referenceList0EntryCount_), pReferenceList0Entries(pReferenceList0Entries_), referenceList1EntryCount(referenceList1EntryCount_), pReferenceList1Entries(pReferenceList1Entries_), pReferenceModifications(pReferenceModifications_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsEXT(VideoEncodeH265ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265ReferenceListsEXT(VkVideoEncodeH265ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265ReferenceListsEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265ReferenceListsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList0Entries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList1Entries_ = {}, const StdVideoEncodeH265ReferenceModifications *pReferenceModifications_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , 
referenceList0EntryCount(static_cast(referenceList0Entries_.size())) , pReferenceList0Entries(referenceList0Entries_.data()) , referenceList1EntryCount(static_cast(referenceList1Entries_.size())) , pReferenceList1Entries(referenceList1Entries_.data()) , pReferenceModifications(pReferenceModifications_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265ReferenceListsEXT &operator=(VideoEncodeH265ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265ReferenceListsEXT &operator=(VkVideoEncodeH265ReferenceListsEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT &setReferenceList0EntryCount(uint8_t referenceList0EntryCount_) VULKAN_HPP_NOEXCEPT { referenceList0EntryCount = referenceList0EntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList0Entries(const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT *pReferenceList0Entries_) VULKAN_HPP_NOEXCEPT { pReferenceList0Entries = pReferenceList0Entries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265ReferenceListsEXT &setReferenceList0Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList0Entries_) VULKAN_HPP_NOEXCEPT { referenceList0EntryCount = static_cast(referenceList0Entries_.size()); pReferenceList0Entries = referenceList0Entries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT &setReferenceList1EntryCount(uint8_t referenceList1EntryCount_) VULKAN_HPP_NOEXCEPT { referenceList1EntryCount = referenceList1EntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceList1Entries(const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT *pReferenceList1Entries_) VULKAN_HPP_NOEXCEPT { pReferenceList1Entries = pReferenceList1Entries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265ReferenceListsEXT &setReferenceList1Entries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceList1Entries_) VULKAN_HPP_NOEXCEPT { referenceList1EntryCount = static_cast(referenceList1Entries_.size()); pReferenceList1Entries = referenceList1Entries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsEXT & setPReferenceModifications(const StdVideoEncodeH265ReferenceModifications *pReferenceModifications_) VULKAN_HPP_NOEXCEPT { pReferenceModifications = pReferenceModifications_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265ReferenceListsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265ReferenceListsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pReferenceModifications); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265ReferenceListsEXT const &) const = 
default; # else bool operator==(VideoEncodeH265ReferenceListsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (referenceList0EntryCount == rhs.referenceList0EntryCount) && (pReferenceList0Entries == rhs.pReferenceList0Entries) && (referenceList1EntryCount == rhs.referenceList1EntryCount) && (pReferenceList1Entries == rhs.pReferenceList1Entries) && (pReferenceModifications == rhs.pReferenceModifications); # endif } bool operator!=(VideoEncodeH265ReferenceListsEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsEXT; const void *pNext = {}; uint8_t referenceList0EntryCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT *pReferenceList0Entries = {}; uint8_t referenceList1EntryCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT *pReferenceList1Entries = {}; const StdVideoEncodeH265ReferenceModifications *pReferenceModifications = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT) == sizeof(VkVideoEncodeH265ReferenceListsEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265ReferenceListsEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265ReferenceListsEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265NaluSliceSegmentEXT { using NativeType = VkVideoEncodeH265NaluSliceSegmentEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentEXT(uint32_t ctbCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists_ = {}, const StdVideoEncodeH265SliceSegmentHeader *pSliceSegmentHeaderStd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), ctbCount(ctbCount_), pReferenceFinalLists(pReferenceFinalLists_), pSliceSegmentHeaderStd(pSliceSegmentHeaderStd_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentEXT(VideoEncodeH265NaluSliceSegmentEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265NaluSliceSegmentEXT(VkVideoEncodeH265NaluSliceSegmentEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265NaluSliceSegmentEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265NaluSliceSegmentEXT &operator=(VideoEncodeH265NaluSliceSegmentEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265NaluSliceSegmentEXT &operator=(VkVideoEncodeH265NaluSliceSegmentEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT &setCtbCount(uint32_t ctbCount_) VULKAN_HPP_NOEXCEPT { ctbCount = ctbCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPReferenceFinalLists(const 
VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists_) VULKAN_HPP_NOEXCEPT { pReferenceFinalLists = pReferenceFinalLists_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentEXT & setPSliceSegmentHeaderStd(const StdVideoEncodeH265SliceSegmentHeader *pSliceSegmentHeaderStd_) VULKAN_HPP_NOEXCEPT { pSliceSegmentHeaderStd = pSliceSegmentHeaderStd_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265NaluSliceSegmentEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265NaluSliceSegmentEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, ctbCount, pReferenceFinalLists, pSliceSegmentHeaderStd); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265NaluSliceSegmentEXT const &) const = default; # else bool operator==(VideoEncodeH265NaluSliceSegmentEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (ctbCount == rhs.ctbCount) && (pReferenceFinalLists == rhs.pReferenceFinalLists) && (pSliceSegmentHeaderStd == rhs.pSliceSegmentHeaderStd); # endif } bool operator!=(VideoEncodeH265NaluSliceSegmentEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentEXT; const void *pNext = {}; uint32_t ctbCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists = {}; const StdVideoEncodeH265SliceSegmentHeader *pSliceSegmentHeaderStd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT) == sizeof(VkVideoEncodeH265NaluSliceSegmentEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265NaluSliceSegmentEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265NaluSliceSegmentEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265ProfileEXT { using NativeType = VkVideoEncodeH265ProfileEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileEXT(StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), stdProfileIdc(stdProfileIdc_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileEXT(VideoEncodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265ProfileEXT(VkVideoEncodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265ProfileEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265ProfileEXT &operator=(VideoEncodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265ProfileEXT &operator=(VkVideoEncodeH265ProfileEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) 
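    //=== Editorial note: usage sketch (not part of the generated API) ===
    // A minimal sketch of describing an H.265 encode profile, assuming the default "vk" namespace
    // alias; StdVideoH265ProfileIdc and its STD_VIDEO_H265_PROFILE_IDC_* values come from the
    // vk_video std headers, and vk::VideoProfileKHR is the codec-independent profile structure from
    // VK_KHR_video_queue:
    //
    //   auto h265Profile = vk::VideoEncodeH265ProfileEXT{}
    //                        .setStdProfileIdc(STD_VIDEO_H265_PROFILE_IDC_MAIN);  // assumed enumerator name
    //   vk::VideoProfileKHR profile{};
    //   profile.pNext = &h265Profile;                  // chain the codec-specific profile onto the generic one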
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileEXT &setStdProfileIdc(StdVideoH265ProfileIdc stdProfileIdc_) VULKAN_HPP_NOEXCEPT { stdProfileIdc = stdProfileIdc_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, stdProfileIdc); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(VideoEncodeH265ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = memcmp(&stdProfileIdc, &rhs.stdProfileIdc, sizeof(StdVideoH265ProfileIdc)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(VideoEncodeH265ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (memcmp(&stdProfileIdc, &rhs.stdProfileIdc, sizeof(StdVideoH265ProfileIdc)) == 0); } bool operator!=(VideoEncodeH265ProfileEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileEXT; const void *pNext = {}; StdVideoH265ProfileIdc stdProfileIdc = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileEXT) == sizeof(VkVideoEncodeH265ProfileEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265ProfileEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265ProfileEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265QpEXT { using NativeType = VkVideoEncodeH265QpEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT(int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {}) VULKAN_HPP_NOEXCEPT : qpI(qpI_), qpP(qpP_), qpB(qpB_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT(VideoEncodeH265QpEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265QpEXT(VkVideoEncodeH265QpEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265QpEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265QpEXT &operator=(VideoEncodeH265QpEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265QpEXT &operator=(VkVideoEncodeH265QpEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT &setQpI(int32_t qpI_) VULKAN_HPP_NOEXCEPT { qpI = qpI_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT &setQpP(int32_t qpP_) VULKAN_HPP_NOEXCEPT { qpP = qpP_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT &setQpB(int32_t qpB_) VULKAN_HPP_NOEXCEPT { qpB = qpB_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator 
VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(qpI, qpP, qpB); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265QpEXT const &) const = default; # else bool operator==(VideoEncodeH265QpEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (qpI == rhs.qpI) && (qpP == rhs.qpP) && (qpB == rhs.qpB); # endif } bool operator!=(VideoEncodeH265QpEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: int32_t qpI = {}; int32_t qpP = {}; int32_t qpB = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT) == sizeof(VkVideoEncodeH265QpEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265QpEXT is not nothrow_move_constructible!"); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265RateControlInfoEXT { using NativeType = VkVideoEncodeH265RateControlInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT(uint32_t gopFrameCount_ = {}, uint32_t idrPeriod_ = {}, uint32_t consecutiveBFrameCount_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_ = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown, uint8_t subLayerCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), gopFrameCount(gopFrameCount_), idrPeriod(idrPeriod_), consecutiveBFrameCount(consecutiveBFrameCount_), rateControlStructure(rateControlStructure_), subLayerCount(subLayerCount_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT(VideoEncodeH265RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265RateControlInfoEXT(VkVideoEncodeH265RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265RateControlInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265RateControlInfoEXT &operator=(VideoEncodeH265RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265RateControlInfoEXT &operator=(VkVideoEncodeH265RateControlInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &setGopFrameCount(uint32_t gopFrameCount_) VULKAN_HPP_NOEXCEPT { gopFrameCount = gopFrameCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &setIdrPeriod(uint32_t idrPeriod_) VULKAN_HPP_NOEXCEPT { idrPeriod = idrPeriod_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &setConsecutiveBFrameCount(uint32_t consecutiveBFrameCount_) 
VULKAN_HPP_NOEXCEPT { consecutiveBFrameCount = consecutiveBFrameCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setRateControlStructure(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure_) VULKAN_HPP_NOEXCEPT { rateControlStructure = rateControlStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT &setSubLayerCount(uint8_t subLayerCount_) VULKAN_HPP_NOEXCEPT { subLayerCount = subLayerCount_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265RateControlInfoEXT const &) const = default; # else bool operator==(VideoEncodeH265RateControlInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (gopFrameCount == rhs.gopFrameCount) && (idrPeriod == rhs.idrPeriod) && (consecutiveBFrameCount == rhs.consecutiveBFrameCount) && (rateControlStructure == rhs.rateControlStructure) && (subLayerCount == rhs.subLayerCount); # endif } bool operator!=(VideoEncodeH265RateControlInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT; const void *pNext = {}; uint32_t gopFrameCount = {}; uint32_t idrPeriod = {}; uint32_t consecutiveBFrameCount = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureFlagBitsEXT::eUnknown; uint8_t subLayerCount = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT) == sizeof(VkVideoEncodeH265RateControlInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265RateControlInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265RateControlLayerInfoEXT { using NativeType = VkVideoEncodeH265RateControlLayerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT(uint8_t temporalId_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), temporalId(temporalId_), useInitialRcQp(useInitialRcQp_), initialRcQp(initialRcQp_), useMinQp(useMinQp_), minQp(minQp_), useMaxQp(useMaxQp_), maxQp(maxQp_), useMaxFrameSize(useMaxFrameSize_), maxFrameSize(maxFrameSize_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT(VideoEncodeH265RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265RateControlLayerInfoEXT(VkVideoEncodeH265RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265RateControlLayerInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265RateControlLayerInfoEXT &operator=(VideoEncodeH265RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265RateControlLayerInfoEXT &operator=(VkVideoEncodeH265RateControlLayerInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setTemporalId(uint8_t temporalId_) VULKAN_HPP_NOEXCEPT { temporalId = temporalId_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setUseInitialRcQp(VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_) VULKAN_HPP_NOEXCEPT { useInitialRcQp = useInitialRcQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setInitialRcQp(VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &initialRcQp_) VULKAN_HPP_NOEXCEPT { initialRcQp = initialRcQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setUseMinQp(VULKAN_HPP_NAMESPACE::Bool32 useMinQp_) VULKAN_HPP_NOEXCEPT { useMinQp = useMinQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setMinQp(VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &minQp_) VULKAN_HPP_NOEXCEPT { minQp = minQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setUseMaxQp(VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_) VULKAN_HPP_NOEXCEPT { useMaxQp = useMaxQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setMaxQp(VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const &maxQp_) VULKAN_HPP_NOEXCEPT { maxQp = maxQp_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT &setUseMaxFrameSize(VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_) VULKAN_HPP_NOEXCEPT { useMaxFrameSize = useMaxFrameSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxFrameSize(VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const &maxFrameSize_) VULKAN_HPP_NOEXCEPT { maxFrameSize = maxFrameSize_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, temporalId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize); } # endif # if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265RateControlLayerInfoEXT const &) const = default; # else bool operator==(VideoEncodeH265RateControlLayerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (temporalId == rhs.temporalId) && (useInitialRcQp == rhs.useInitialRcQp) && (initialRcQp == rhs.initialRcQp) && (useMinQp == rhs.useMinQp) && (minQp == rhs.minQp) && (useMaxQp == rhs.useMaxQp) && (maxQp == rhs.maxQp) && (useMaxFrameSize == rhs.useMaxFrameSize) && (maxFrameSize == rhs.maxFrameSize); # endif } bool operator!=(VideoEncodeH265RateControlLayerInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT; const void *pNext = {}; uint8_t temporalId = {}; VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp = {}; VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp = {}; VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp = {}; VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT) == sizeof(VkVideoEncodeH265RateControlLayerInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265RateControlLayerInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265SessionParametersAddInfoEXT { using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT(uint32_t vpsStdCount_ = {}, const StdVideoH265VideoParameterSet *pVpsStd_ = {}, uint32_t spsStdCount_ = {}, const StdVideoH265SequenceParameterSet *pSpsStd_ = {}, uint32_t ppsStdCount_ = {}, const StdVideoH265PictureParameterSet *pPpsStd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), vpsStdCount(vpsStdCount_), pVpsStd(pVpsStd_), spsStdCount(spsStdCount_), pSpsStd(pSpsStd_), ppsStdCount(ppsStdCount_), pPpsStd(pPpsStd_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT(VideoEncodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265SessionParametersAddInfoEXT(VkVideoEncodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265SessionParametersAddInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265SessionParametersAddInfoEXT(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vpsStd_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , 
vpsStdCount(static_cast(vpsStd_.size())) , pVpsStd(vpsStd_.data()) , spsStdCount(static_cast(spsStd_.size())) , pSpsStd(spsStd_.data()) , ppsStdCount(static_cast(ppsStd_.size())) , pPpsStd(ppsStd_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265SessionParametersAddInfoEXT &operator=(VideoEncodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265SessionParametersAddInfoEXT &operator=(VkVideoEncodeH265SessionParametersAddInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setVpsStdCount(uint32_t vpsStdCount_) VULKAN_HPP_NOEXCEPT { vpsStdCount = vpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setPVpsStd(const StdVideoH265VideoParameterSet *pVpsStd_) VULKAN_HPP_NOEXCEPT { pVpsStd = pVpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265SessionParametersAddInfoEXT & setVpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &vpsStd_) VULKAN_HPP_NOEXCEPT { vpsStdCount = static_cast(vpsStd_.size()); pVpsStd = vpsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setSpsStdCount(uint32_t spsStdCount_) VULKAN_HPP_NOEXCEPT { spsStdCount = spsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setPSpsStd(const StdVideoH265SequenceParameterSet *pSpsStd_) VULKAN_HPP_NOEXCEPT { pSpsStd = pSpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265SessionParametersAddInfoEXT & setSpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &spsStd_) VULKAN_HPP_NOEXCEPT { spsStdCount = static_cast(spsStd_.size()); pSpsStd = spsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setPpsStdCount(uint32_t ppsStdCount_) VULKAN_HPP_NOEXCEPT { ppsStdCount = ppsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT &setPPpsStd(const StdVideoH265PictureParameterSet *pPpsStd_) VULKAN_HPP_NOEXCEPT { pPpsStd = pPpsStd_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265SessionParametersAddInfoEXT & setPpsStd(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &ppsStd_) VULKAN_HPP_NOEXCEPT { ppsStdCount = static_cast(ppsStd_.size()); pPpsStd = ppsStd_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265SessionParametersAddInfoEXT const &) const = default; # else bool 
operator==(VideoEncodeH265SessionParametersAddInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (vpsStdCount == rhs.vpsStdCount) && (pVpsStd == rhs.pVpsStd) && (spsStdCount == rhs.spsStdCount) && (pSpsStd == rhs.pSpsStd) && (ppsStdCount == rhs.ppsStdCount) && (pPpsStd == rhs.pPpsStd); # endif } bool operator!=(VideoEncodeH265SessionParametersAddInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT; const void *pNext = {}; uint32_t vpsStdCount = {}; const StdVideoH265VideoParameterSet *pVpsStd = {}; uint32_t spsStdCount = {}; const StdVideoH265SequenceParameterSet *pSpsStd = {}; uint32_t ppsStdCount = {}; const StdVideoH265PictureParameterSet *pPpsStd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT) == sizeof(VkVideoEncodeH265SessionParametersAddInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265SessionParametersAddInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265SessionParametersCreateInfoEXT { using NativeType = VkVideoEncodeH265SessionParametersCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT(uint32_t maxVpsStdCount_ = {}, uint32_t maxSpsStdCount_ = {}, uint32_t maxPpsStdCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT *pParametersAddInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), maxVpsStdCount(maxVpsStdCount_), maxSpsStdCount(maxSpsStdCount_), maxPpsStdCount(maxPpsStdCount_), pParametersAddInfo(pParametersAddInfo_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersCreateInfoEXT(VideoEncodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265SessionParametersCreateInfoEXT(VkVideoEncodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265SessionParametersCreateInfoEXT(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265SessionParametersCreateInfoEXT &operator=(VideoEncodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265SessionParametersCreateInfoEXT &operator=(VkVideoEncodeH265SessionParametersCreateInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &setMaxVpsStdCount(uint32_t maxVpsStdCount_) VULKAN_HPP_NOEXCEPT { maxVpsStdCount = maxVpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH265SessionParametersCreateInfoEXT &setMaxSpsStdCount(uint32_t maxSpsStdCount_) VULKAN_HPP_NOEXCEPT { maxSpsStdCount = maxSpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT &setMaxPpsStdCount(uint32_t maxPpsStdCount_) VULKAN_HPP_NOEXCEPT { maxPpsStdCount = maxPpsStdCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT *pParametersAddInfo_) VULKAN_HPP_NOEXCEPT { pParametersAddInfo = pParametersAddInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265SessionParametersCreateInfoEXT const &) const = default; # else bool operator==(VideoEncodeH265SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (maxVpsStdCount == rhs.maxVpsStdCount) && (maxSpsStdCount == rhs.maxSpsStdCount) && (maxPpsStdCount == rhs.maxPpsStdCount) && (pParametersAddInfo == rhs.pParametersAddInfo); # endif } bool operator!=(VideoEncodeH265SessionParametersCreateInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT; const void *pNext = {}; uint32_t maxVpsStdCount = {}; uint32_t maxSpsStdCount = {}; uint32_t maxPpsStdCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT *pParametersAddInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT) == sizeof(VkVideoEncodeH265SessionParametersCreateInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265SessionParametersCreateInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeH265VclFrameInfoEXT { using NativeType = VkVideoEncodeH265VclFrameInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265VclFrameInfoEXT; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT(const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists_ = {}, uint32_t naluSliceSegmentEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT *pNaluSliceSegmentEntries_ = {}, const StdVideoEncodeH265PictureInfo *pCurrentPictureInfo_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), 
pReferenceFinalLists(pReferenceFinalLists_), naluSliceSegmentEntryCount(naluSliceSegmentEntryCount_), pNaluSliceSegmentEntries(pNaluSliceSegmentEntries_), pCurrentPictureInfo(pCurrentPictureInfo_) { } VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT(VideoEncodeH265VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265VclFrameInfoEXT(VkVideoEncodeH265VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeH265VclFrameInfoEXT(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &naluSliceSegmentEntries_, const StdVideoEncodeH265PictureInfo *pCurrentPictureInfo_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , pReferenceFinalLists(pReferenceFinalLists_) , naluSliceSegmentEntryCount(static_cast(naluSliceSegmentEntries_.size())) , pNaluSliceSegmentEntries(naluSliceSegmentEntries_.data()) , pCurrentPictureInfo(pCurrentPictureInfo_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeH265VclFrameInfoEXT &operator=(VideoEncodeH265VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeH265VclFrameInfoEXT &operator=(VkVideoEncodeH265VclFrameInfoEXT const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPReferenceFinalLists(const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists_) VULKAN_HPP_NOEXCEPT { pReferenceFinalLists = pReferenceFinalLists_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT &setNaluSliceSegmentEntryCount(uint32_t naluSliceSegmentEntryCount_) VULKAN_HPP_NOEXCEPT { naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNaluSliceSegmentEntries(const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT *pNaluSliceSegmentEntries_) VULKAN_HPP_NOEXCEPT { pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeH265VclFrameInfoEXT &setNaluSliceSegmentEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &naluSliceSegmentEntries_) VULKAN_HPP_NOEXCEPT { naluSliceSegmentEntryCount = static_cast(naluSliceSegmentEntries_.size()); pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPCurrentPictureInfo(const StdVideoEncodeH265PictureInfo *pCurrentPictureInfo_) VULKAN_HPP_NOEXCEPT { pCurrentPictureInfo = pCurrentPictureInfo_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, pReferenceFinalLists, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pCurrentPictureInfo); } # endif # if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeH265VclFrameInfoEXT const &) const = default; # else bool operator==(VideoEncodeH265VclFrameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (pReferenceFinalLists == rhs.pReferenceFinalLists) && (naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount) && (pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries) && (pCurrentPictureInfo == rhs.pCurrentPictureInfo); # endif } bool operator!=(VideoEncodeH265VclFrameInfoEXT const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265VclFrameInfoEXT; const void *pNext = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsEXT *pReferenceFinalLists = {}; uint32_t naluSliceSegmentEntryCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentEXT *pNaluSliceSegmentEntries = {}; const StdVideoEncodeH265PictureInfo *pCurrentPictureInfo = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT) == sizeof(VkVideoEncodeH265VclFrameInfoEXT), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeH265VclFrameInfoEXT; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeInfoKHR { using NativeType = VkVideoEncodeInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, uint32_t qualityLevel_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot_ = {}, uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots_ = {}, uint32_t precedingExternallyEncodedBytes_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), qualityLevel(qualityLevel_), dstBitstreamBuffer(dstBitstreamBuffer_), dstBitstreamBufferOffset(dstBitstreamBufferOffset_), dstBitstreamBufferMaxRange(dstBitstreamBufferMaxRange_), srcPictureResource(srcPictureResource_), pSetupReferenceSlot(pSetupReferenceSlot_), referenceSlotCount(referenceSlotCount_), pReferenceSlots(pReferenceSlots_), precedingExternallyEncodedBytes(precedingExternallyEncodedBytes_) { } VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR(VideoEncodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeInfoKHR(VkVideoEncodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeInfoKHR(VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, uint32_t qualityLevel_, VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, 
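                       // Per the provisional VK_KHR_video_encode_queue extension, the offset above is the byte
                       // offset in dstBitstreamBuffer_ where the encoded bitstream is written, and the max-range
                       // value that follows caps how many bytes may be written starting at that offset.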
VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource_, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceSlots_, uint32_t precedingExternallyEncodedBytes_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , qualityLevel(qualityLevel_) , dstBitstreamBuffer(dstBitstreamBuffer_) , dstBitstreamBufferOffset(dstBitstreamBufferOffset_) , dstBitstreamBufferMaxRange(dstBitstreamBufferMaxRange_) , srcPictureResource(srcPictureResource_) , pSetupReferenceSlot(pSetupReferenceSlot_) , referenceSlotCount(static_cast(referenceSlots_.size())) , pReferenceSlots(referenceSlots_.data()) , precedingExternallyEncodedBytes(precedingExternallyEncodedBytes_) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeInfoKHR &operator=(VideoEncodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeInfoKHR &operator=(VkVideoEncodeInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setQualityLevel(uint32_t qualityLevel_) VULKAN_HPP_NOEXCEPT { qualityLevel = qualityLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setDstBitstreamBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_) VULKAN_HPP_NOEXCEPT { dstBitstreamBuffer = dstBitstreamBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setDstBitstreamBufferOffset(VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_) VULKAN_HPP_NOEXCEPT { dstBitstreamBufferOffset = dstBitstreamBufferOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferMaxRange(VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_) VULKAN_HPP_NOEXCEPT { dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setSrcPictureResource(VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const &srcPictureResource_) VULKAN_HPP_NOEXCEPT { srcPictureResource = srcPictureResource_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPSetupReferenceSlot(const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot_) VULKAN_HPP_NOEXCEPT { pSetupReferenceSlot = pSetupReferenceSlot_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setReferenceSlotCount(uint32_t referenceSlotCount_) VULKAN_HPP_NOEXCEPT { referenceSlotCount = referenceSlotCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setPReferenceSlots(const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots_) VULKAN_HPP_NOEXCEPT { pReferenceSlots = pReferenceSlots_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeInfoKHR &setReferenceSlots( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &referenceSlots_) VULKAN_HPP_NOEXCEPT { referenceSlotCount = static_cast(referenceSlots_.size()); pReferenceSlots = referenceSlots_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &setPrecedingExternallyEncodedBytes(uint32_t 
precedingExternallyEncodedBytes_) VULKAN_HPP_NOEXCEPT { precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, qualityLevel, dstBitstreamBuffer, dstBitstreamBufferOffset, dstBitstreamBufferMaxRange, srcPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots, precedingExternallyEncodedBytes); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeInfoKHR const &) const = default; # else bool operator==(VideoEncodeInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (qualityLevel == rhs.qualityLevel) && (dstBitstreamBuffer == rhs.dstBitstreamBuffer) && (dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset) && (dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange) && (srcPictureResource == rhs.srcPictureResource) && (pSetupReferenceSlot == rhs.pSetupReferenceSlot) && (referenceSlotCount == rhs.referenceSlotCount) && (pReferenceSlots == rhs.pReferenceSlots) && (precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes); # endif } bool operator!=(VideoEncodeInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; uint32_t qualityLevel = {}; VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {}; VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {}; VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {}; VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR srcPictureResource = {}; const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pSetupReferenceSlot = {}; uint32_t referenceSlotCount = {}; const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *pReferenceSlots = {}; uint32_t precedingExternallyEncodedBytes = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR) == sizeof(VkVideoEncodeInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeRateControlLayerInfoKHR { using NativeType = VkVideoEncodeRateControlLayerInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR(uint32_t averageBitrate_ = {}, uint32_t maxBitrate_ = {}, uint32_t frameRateNumerator_ = {}, uint32_t frameRateDenominator_ = {}, uint32_t virtualBufferSizeInMs_ = {}, uint32_t initialVirtualBufferSizeInMs_ = {}, const void *pNext_ = nullptr) 
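    // Units, per the provisional VK_KHR_video_encode_queue extension: the average / max bitrates
    // are in bits per second, the frame rate is a numerator / denominator pair, and the virtual
    // (leaky-bucket) buffer sizes are in milliseconds.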
VULKAN_HPP_NOEXCEPT : pNext(pNext_), averageBitrate(averageBitrate_), maxBitrate(maxBitrate_), frameRateNumerator(frameRateNumerator_), frameRateDenominator(frameRateDenominator_), virtualBufferSizeInMs(virtualBufferSizeInMs_), initialVirtualBufferSizeInMs(initialVirtualBufferSizeInMs_) { } VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR(VideoEncodeRateControlLayerInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeRateControlLayerInfoKHR(VkVideoEncodeRateControlLayerInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeRateControlLayerInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeRateControlLayerInfoKHR &operator=(VideoEncodeRateControlLayerInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeRateControlLayerInfoKHR &operator=(VkVideoEncodeRateControlLayerInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setAverageBitrate(uint32_t averageBitrate_) VULKAN_HPP_NOEXCEPT { averageBitrate = averageBitrate_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setMaxBitrate(uint32_t maxBitrate_) VULKAN_HPP_NOEXCEPT { maxBitrate = maxBitrate_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setFrameRateNumerator(uint32_t frameRateNumerator_) VULKAN_HPP_NOEXCEPT { frameRateNumerator = frameRateNumerator_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setFrameRateDenominator(uint32_t frameRateDenominator_) VULKAN_HPP_NOEXCEPT { frameRateDenominator = frameRateDenominator_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setVirtualBufferSizeInMs(uint32_t virtualBufferSizeInMs_) VULKAN_HPP_NOEXCEPT { virtualBufferSizeInMs = virtualBufferSizeInMs_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR &setInitialVirtualBufferSizeInMs(uint32_t initialVirtualBufferSizeInMs_) VULKAN_HPP_NOEXCEPT { initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator, virtualBufferSizeInMs, initialVirtualBufferSizeInMs); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeRateControlLayerInfoKHR const &) const = default; # else bool operator==(VideoEncodeRateControlLayerInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (averageBitrate == rhs.averageBitrate) && (maxBitrate == rhs.maxBitrate) && (frameRateNumerator == rhs.frameRateNumerator) && (frameRateDenominator == rhs.frameRateDenominator) && (virtualBufferSizeInMs == rhs.virtualBufferSizeInMs) && (initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs); # endif } bool 
operator!=(VideoEncodeRateControlLayerInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR; const void *pNext = {}; uint32_t averageBitrate = {}; uint32_t maxBitrate = {}; uint32_t frameRateNumerator = {}; uint32_t frameRateDenominator = {}; uint32_t virtualBufferSizeInMs = {}; uint32_t initialVirtualBufferSizeInMs = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR) == sizeof(VkVideoEncodeRateControlLayerInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeRateControlLayerInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEncodeRateControlInfoKHR { using NativeType = VkVideoEncodeRateControlInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone, uint8_t layerCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR *pLayerConfigs_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), rateControlMode(rateControlMode_), layerCount(layerCount_), pLayerConfigs(pLayerConfigs_) { } VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR(VideoEncodeRateControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeRateControlInfoKHR(VkVideoEncodeRateControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoEncodeRateControlInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &layerConfigs_, const void *pNext_ = nullptr) : pNext(pNext_) , flags(flags_) , rateControlMode(rateControlMode_) , layerCount(static_cast(layerConfigs_.size())) , pLayerConfigs(layerConfigs_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeRateControlInfoKHR &operator=(VideoEncodeRateControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEncodeRateControlInfoKHR &operator=(VkVideoEncodeRateControlInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setRateControlMode(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_) VULKAN_HPP_NOEXCEPT { 
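      // eNone disables implementation rate control (the application then drives quality itself,
      // e.g. via per-picture QP), while eCbr / eVbr pace the stream against the per-layer budgets
      // supplied through pLayerConfigs.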
rateControlMode = rateControlMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &setLayerCount(uint8_t layerCount_) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPLayerConfigs(const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR *pLayerConfigs_) VULKAN_HPP_NOEXCEPT { pLayerConfigs = pLayerConfigs_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) VideoEncodeRateControlInfoKHR &setLayerConfigs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &layerConfigs_) VULKAN_HPP_NOEXCEPT { layerCount = static_cast(layerConfigs_.size()); pLayerConfigs = layerConfigs_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, rateControlMode, layerCount, pLayerConfigs); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoEncodeRateControlInfoKHR const &) const = default; # else bool operator==(VideoEncodeRateControlInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (rateControlMode == rhs.rateControlMode) && (layerCount == rhs.layerCount) && (pLayerConfigs == rhs.pLayerConfigs); # endif } bool operator!=(VideoEncodeRateControlInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {}; VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone; uint8_t layerCount = {}; const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR *pLayerConfigs = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR) == sizeof(VkVideoEncodeRateControlInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoEncodeRateControlInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoEndCodingInfoKHR { using NativeType = VkVideoEndCodingInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR(VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_) { } VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR(VideoEndCodingInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoEndCodingInfoKHR(VkVideoEndCodingInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : 
      VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
    {
    }
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
    }

    explicit operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
#  else
    bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
#    endif
    }

    bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eVideoEndCodingInfoKHR;
    const void *                                 pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
                            "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
  {
    using Type = VideoEndCodingInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoFormatPropertiesKHR
  {
    using NativeType = VkVideoFormatPropertiesKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                                   void *                       pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , format( format_ )
    {
    }

    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
    {
    }
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
      return *this;
    }

    explicit operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
    }

    explicit operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return
*reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, format); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoFormatPropertiesKHR const &) const = default; # else bool operator==(VideoFormatPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (format == rhs.format); # endif } bool operator!=(VideoFormatPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR) == sizeof(VkVideoFormatPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoFormatPropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoFormatPropertiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoGetMemoryPropertiesKHR { using NativeType = VkVideoGetMemoryPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR(uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), memoryBindIndex(memoryBindIndex_), pMemoryRequirements(pMemoryRequirements_) { } VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR(VideoGetMemoryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoGetMemoryPropertiesKHR(VkVideoGetMemoryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoGetMemoryPropertiesKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoGetMemoryPropertiesKHR &operator=(VideoGetMemoryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoGetMemoryPropertiesKHR &operator=(VkVideoGetMemoryPropertiesKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR &setMemoryBindIndex(uint32_t memoryBindIndex_) VULKAN_HPP_NOEXCEPT { memoryBindIndex = memoryBindIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR & setPMemoryRequirements(VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements_) VULKAN_HPP_NOEXCEPT { pMemoryRequirements = pMemoryRequirements_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple 
# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, memoryBindIndex, pMemoryRequirements); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoGetMemoryPropertiesKHR const &) const = default; # else bool operator==(VideoGetMemoryPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (memoryBindIndex == rhs.memoryBindIndex) && (pMemoryRequirements == rhs.pMemoryRequirements); # endif } bool operator!=(VideoGetMemoryPropertiesKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoGetMemoryPropertiesKHR; const void *pNext = {}; uint32_t memoryBindIndex = {}; VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR) == sizeof(VkVideoGetMemoryPropertiesKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoGetMemoryPropertiesKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoGetMemoryPropertiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoQueueFamilyProperties2KHR { using NativeType = VkVideoQueueFamilyProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), videoCodecOperations(videoCodecOperations_) { } VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(VideoQueueFamilyProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoQueueFamilyProperties2KHR(VkVideoQueueFamilyProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoQueueFamilyProperties2KHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoQueueFamilyProperties2KHR &operator=(VideoQueueFamilyProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoQueueFamilyProperties2KHR &operator=(VkVideoQueueFamilyProperties2KHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR &setPNext(void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR & setVideoCodecOperations(VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_) VULKAN_HPP_NOEXCEPT { videoCodecOperations = videoCodecOperations_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, videoCodecOperations); } # endif # if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoQueueFamilyProperties2KHR const &) const = default; # else bool operator==(VideoQueueFamilyProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (videoCodecOperations == rhs.videoCodecOperations); # endif } bool operator!=(VideoQueueFamilyProperties2KHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoQueueFamilyProperties2KHR; void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR) == sizeof(VkVideoQueueFamilyProperties2KHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoQueueFamilyProperties2KHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoQueueFamilyProperties2KHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoSessionCreateInfoKHR { using NativeType = VkVideoSessionCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileKHR *pVideoProfile_ = {}, VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t maxReferencePicturesSlotsCount_ = {}, uint32_t maxReferencePicturesActiveCount_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties *pStdHeaderVersion_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), queueFamilyIndex(queueFamilyIndex_), flags(flags_), pVideoProfile(pVideoProfile_), pictureFormat(pictureFormat_), maxCodedExtent(maxCodedExtent_), referencePicturesFormat(referencePicturesFormat_), maxReferencePicturesSlotsCount(maxReferencePicturesSlotsCount_), maxReferencePicturesActiveCount(maxReferencePicturesActiveCount_), pStdHeaderVersion(pStdHeaderVersion_) { } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR(VideoSessionCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoSessionCreateInfoKHR(VkVideoSessionCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoSessionCreateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionCreateInfoKHR &operator=(VideoSessionCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoSessionCreateInfoKHR &operator=(VkVideoSessionCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setQueueFamilyIndex(uint32_t queueFamilyIndex_) VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; 
} VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setPVideoProfile(const VULKAN_HPP_NAMESPACE::VideoProfileKHR *pVideoProfile_) VULKAN_HPP_NOEXCEPT { pVideoProfile = pVideoProfile_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setPictureFormat(VULKAN_HPP_NAMESPACE::Format pictureFormat_) VULKAN_HPP_NOEXCEPT { pictureFormat = pictureFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setMaxCodedExtent(VULKAN_HPP_NAMESPACE::Extent2D const &maxCodedExtent_) VULKAN_HPP_NOEXCEPT { maxCodedExtent = maxCodedExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setReferencePicturesFormat(VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_) VULKAN_HPP_NOEXCEPT { referencePicturesFormat = referencePicturesFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setMaxReferencePicturesSlotsCount(uint32_t maxReferencePicturesSlotsCount_) VULKAN_HPP_NOEXCEPT { maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &setMaxReferencePicturesActiveCount(uint32_t maxReferencePicturesActiveCount_) VULKAN_HPP_NOEXCEPT { maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPStdHeaderVersion(const VULKAN_HPP_NAMESPACE::ExtensionProperties *pStdHeaderVersion_) VULKAN_HPP_NOEXCEPT { pStdHeaderVersion = pStdHeaderVersion_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, queueFamilyIndex, flags, pVideoProfile, pictureFormat, maxCodedExtent, referencePicturesFormat, maxReferencePicturesSlotsCount, maxReferencePicturesActiveCount, pStdHeaderVersion); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoSessionCreateInfoKHR const &) const = default; # else bool operator==(VideoSessionCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (queueFamilyIndex == rhs.queueFamilyIndex) && (flags == rhs.flags) && (pVideoProfile == rhs.pVideoProfile) && (pictureFormat == rhs.pictureFormat) && (maxCodedExtent == rhs.maxCodedExtent) && (referencePicturesFormat == rhs.referencePicturesFormat) && (maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount) && (maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount) && (pStdHeaderVersion == rhs.pStdHeaderVersion); # endif } bool operator!=(VideoSessionCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; const void *pNext = {}; uint32_t queueFamilyIndex = {}; VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; const VULKAN_HPP_NAMESPACE::VideoProfileKHR *pVideoProfile = {}; VULKAN_HPP_NAMESPACE::Format pictureFormat = 
VULKAN_HPP_NAMESPACE::Format::eUndefined; VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; uint32_t maxReferencePicturesSlotsCount = {}; uint32_t maxReferencePicturesActiveCount = {}; const VULKAN_HPP_NAMESPACE::ExtensionProperties *pStdHeaderVersion = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR) == sizeof(VkVideoSessionCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoSessionCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoSessionCreateInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoSessionParametersCreateInfoKHR { using NativeType = VkVideoSessionParametersCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), videoSessionParametersTemplate(videoSessionParametersTemplate_), videoSession(videoSession_) { } VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(VideoSessionParametersCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoSessionParametersCreateInfoKHR(VkVideoSessionParametersCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoSessionParametersCreateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionParametersCreateInfoKHR &operator=(VideoSessionParametersCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoSessionParametersCreateInfoKHR &operator=(VkVideoSessionParametersCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate(VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_) VULKAN_HPP_NOEXCEPT { videoSessionParametersTemplate = videoSessionParametersTemplate_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &setVideoSession(VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_) VULKAN_HPP_NOEXCEPT { videoSession = videoSession_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, videoSessionParametersTemplate, videoSession); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoSessionParametersCreateInfoKHR const &) const = default; # else bool 
operator==(VideoSessionParametersCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (videoSessionParametersTemplate == rhs.videoSessionParametersTemplate) && (videoSession == rhs.videoSession); # endif } bool operator!=(VideoSessionParametersCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {}; VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR) == sizeof(VkVideoSessionParametersCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = VideoSessionParametersCreateInfoKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined(VK_ENABLE_BETA_EXTENSIONS) struct VideoSessionParametersUpdateInfoKHR { using NativeType = VkVideoSessionParametersUpdateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR(uint32_t updateSequenceCount_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), updateSequenceCount(updateSequenceCount_) { } VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR(VideoSessionParametersUpdateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoSessionParametersUpdateInfoKHR(VkVideoSessionParametersUpdateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : VideoSessionParametersUpdateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionParametersUpdateInfoKHR &operator=(VideoSessionParametersUpdateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; VideoSessionParametersUpdateInfoKHR &operator=(VkVideoSessionParametersUpdateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR &setUpdateSequenceCount(uint32_t updateSequenceCount_) VULKAN_HPP_NOEXCEPT { updateSequenceCount = updateSequenceCount_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, updateSequenceCount); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(VideoSessionParametersUpdateInfoKHR const &) const = default; # else bool 
    operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#    if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#    else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
#    endif
    }

    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eVideoSessionParametersUpdateInfoKHR;
    const void *                        pNext               = {};
    uint32_t                            updateSequenceCount = {};
  };
  VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ),
                            "struct and wrapper have different size!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
                            "struct wrapper is not a standard layout!" );
  VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
                            "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
  {
    using Type = VideoSessionParametersUpdateInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  struct WaylandSurfaceCreateInfoKHR
  {
    using NativeType = VkWaylandSurfaceCreateInfoKHR;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_   = {},
                                                      struct wl_display *                                display_ = {},
                                                      struct wl_surface *                                surface_ = {},
                                                      const void *                                       pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , flags( flags_ )
      , display( display_ )
      , surface( surface_ )
    {
    }

    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
    {
    }
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
    {
      display = display_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }
#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    explicit operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
    }

    explicit operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_USE_REFLECT )
#    if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#    else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR const &,
               struct wl_display * const &,
               struct wl_surface * const &>
#    endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, display, surface );
    }
#  endif

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto
operator<=>(WaylandSurfaceCreateInfoKHR const &) const = default; # else bool operator==(WaylandSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (display == rhs.display) && (surface == rhs.surface); # endif } bool operator!=(WaylandSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {}; struct wl_display *display = {}; struct wl_surface *surface = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR) == sizeof(VkWaylandSurfaceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = WaylandSurfaceCreateInfoKHR; }; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined(VK_USE_PLATFORM_WIN32_KHR) struct Win32KeyedMutexAcquireReleaseInfoKHR { using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory *pAcquireSyncs_ = {}, const uint64_t *pAcquireKeys_ = {}, const uint32_t *pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory *pReleaseSyncs_ = {}, const uint64_t *pReleaseKeys_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), acquireCount(acquireCount_), pAcquireSyncs(pAcquireSyncs_), pAcquireKeys(pAcquireKeys_), pAcquireTimeouts(pAcquireTimeouts_), releaseCount(releaseCount_), pReleaseSyncs(pReleaseSyncs_), pReleaseKeys(pReleaseKeys_) { } VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(Win32KeyedMutexAcquireReleaseInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; Win32KeyedMutexAcquireReleaseInfoKHR(VkWin32KeyedMutexAcquireReleaseInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : Win32KeyedMutexAcquireReleaseInfoKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoKHR(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseKeys_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , acquireCount(static_cast(acquireSyncs_.size())) , pAcquireSyncs(acquireSyncs_.data()) , pAcquireKeys(acquireKeys_.data()) , pAcquireTimeouts(acquireTimeouts_.data()) , releaseCount(static_cast(releaseSyncs_.size())) , pReleaseSyncs(releaseSyncs_.data()) , pReleaseKeys(releaseKeys_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(acquireSyncs_.size() == acquireKeys_.size()); VULKAN_HPP_ASSERT(acquireSyncs_.size() == acquireTimeouts_.size()); 
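      // The acquire arrays are indexed together: pAcquireKeys[i] and pAcquireTimeouts[i] describe the
      // keyed-mutex acquire for pAcquireSyncs[i], so all three proxies must be the same size. With
      // exceptions disabled the mismatch is only asserted; otherwise a LogicError is thrown below.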
VULKAN_HPP_ASSERT(acquireKeys_.size() == acquireTimeouts_.size()); # else if(acquireSyncs_.size() != acquireKeys_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()"); } if(acquireSyncs_.size() != acquireTimeouts_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()"); } if(acquireKeys_.size() != acquireTimeouts_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(releaseSyncs_.size() == releaseKeys_.size()); # else if(releaseSyncs_.size() != releaseKeys_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Win32KeyedMutexAcquireReleaseInfoKHR &operator=(Win32KeyedMutexAcquireReleaseInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; Win32KeyedMutexAcquireReleaseInfoKHR &operator=(VkWin32KeyedMutexAcquireReleaseInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &setAcquireCount(uint32_t acquireCount_) VULKAN_HPP_NOEXCEPT { acquireCount = acquireCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs(const VULKAN_HPP_NAMESPACE::DeviceMemory *pAcquireSyncs_) VULKAN_HPP_NOEXCEPT { pAcquireSyncs = pAcquireSyncs_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireSyncs_) VULKAN_HPP_NOEXCEPT { acquireCount = static_cast(acquireSyncs_.size()); pAcquireSyncs = acquireSyncs_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &setPAcquireKeys(const uint64_t *pAcquireKeys_) VULKAN_HPP_NOEXCEPT { pAcquireKeys = pAcquireKeys_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireKeys_) VULKAN_HPP_NOEXCEPT { acquireCount = static_cast(acquireKeys_.size()); pAcquireKeys = acquireKeys_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &setPAcquireTimeouts(const uint32_t *pAcquireTimeouts_) VULKAN_HPP_NOEXCEPT { pAcquireTimeouts = pAcquireTimeouts_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireTimeouts_) VULKAN_HPP_NOEXCEPT { acquireCount = static_cast(acquireTimeouts_.size()); pAcquireTimeouts = acquireTimeouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 
Win32KeyedMutexAcquireReleaseInfoKHR &setReleaseCount(uint32_t releaseCount_) VULKAN_HPP_NOEXCEPT { releaseCount = releaseCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs(const VULKAN_HPP_NAMESPACE::DeviceMemory *pReleaseSyncs_) VULKAN_HPP_NOEXCEPT { pReleaseSyncs = pReleaseSyncs_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseSyncs_) VULKAN_HPP_NOEXCEPT { releaseCount = static_cast(releaseSyncs_.size()); pReleaseSyncs = releaseSyncs_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &setPReleaseKeys(const uint64_t *pReleaseKeys_) VULKAN_HPP_NOEXCEPT { pReleaseKeys = pReleaseKeys_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseKeys_) VULKAN_HPP_NOEXCEPT { releaseCount = static_cast(releaseKeys_.size()); pReleaseKeys = releaseKeys_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Win32KeyedMutexAcquireReleaseInfoKHR const &) const = default; # else bool operator==(Win32KeyedMutexAcquireReleaseInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (acquireCount == rhs.acquireCount) && (pAcquireSyncs == rhs.pAcquireSyncs) && (pAcquireKeys == rhs.pAcquireKeys) && (pAcquireTimeouts == rhs.pAcquireTimeouts) && (releaseCount == rhs.releaseCount) && (pReleaseSyncs == rhs.pReleaseSyncs) && (pReleaseKeys == rhs.pReleaseKeys); # endif } bool operator!=(Win32KeyedMutexAcquireReleaseInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; const void *pNext = {}; uint32_t acquireCount = {}; const VULKAN_HPP_NAMESPACE::DeviceMemory *pAcquireSyncs = {}; const uint64_t *pAcquireKeys = {}; const uint32_t *pAcquireTimeouts = {}; uint32_t releaseCount = {}; const VULKAN_HPP_NAMESPACE::DeviceMemory *pReleaseSyncs = {}; const uint64_t *pReleaseKeys = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR) == sizeof(VkWin32KeyedMutexAcquireReleaseInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = Win32KeyedMutexAcquireReleaseInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if 
defined(VK_USE_PLATFORM_WIN32_KHR) struct Win32KeyedMutexAcquireReleaseInfoNV { using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory *pAcquireSyncs_ = {}, const uint64_t *pAcquireKeys_ = {}, const uint32_t *pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory *pReleaseSyncs_ = {}, const uint64_t *pReleaseKeys_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), acquireCount(acquireCount_), pAcquireSyncs(pAcquireSyncs_), pAcquireKeys(pAcquireKeys_), pAcquireTimeoutMilliseconds(pAcquireTimeoutMilliseconds_), releaseCount(releaseCount_), pReleaseSyncs(pReleaseSyncs_), pReleaseKeys(pReleaseKeys_) { } VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(Win32KeyedMutexAcquireReleaseInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; Win32KeyedMutexAcquireReleaseInfoNV(VkWin32KeyedMutexAcquireReleaseInfoNV const &rhs) VULKAN_HPP_NOEXCEPT : Win32KeyedMutexAcquireReleaseInfoNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoNV(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseKeys_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , acquireCount(static_cast(acquireSyncs_.size())) , pAcquireSyncs(acquireSyncs_.data()) , pAcquireKeys(acquireKeys_.data()) , pAcquireTimeoutMilliseconds(acquireTimeoutMilliseconds_.data()) , releaseCount(static_cast(releaseSyncs_.size())) , pReleaseSyncs(releaseSyncs_.data()) , pReleaseKeys(releaseKeys_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(acquireSyncs_.size() == acquireKeys_.size()); VULKAN_HPP_ASSERT(acquireSyncs_.size() == acquireTimeoutMilliseconds_.size()); VULKAN_HPP_ASSERT(acquireKeys_.size() == acquireTimeoutMilliseconds_.size()); # else if(acquireSyncs_.size() != acquireKeys_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()"); } if(acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()"); } if(acquireKeys_.size() != acquireTimeoutMilliseconds_.size()) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT(releaseSyncs_.size() == releaseKeys_.size()); # else if(releaseSyncs_.size() != releaseKeys_.size()) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Win32KeyedMutexAcquireReleaseInfoNV &operator=(Win32KeyedMutexAcquireReleaseInfoNV const &rhs) VULKAN_HPP_NOEXCEPT = default; Win32KeyedMutexAcquireReleaseInfoNV &operator=(VkWin32KeyedMutexAcquireReleaseInfoNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &setAcquireCount(uint32_t acquireCount_) VULKAN_HPP_NOEXCEPT { acquireCount = acquireCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs(const VULKAN_HPP_NAMESPACE::DeviceMemory *pAcquireSyncs_) VULKAN_HPP_NOEXCEPT { pAcquireSyncs = pAcquireSyncs_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireSyncs_) VULKAN_HPP_NOEXCEPT { acquireCount = static_cast(acquireSyncs_.size()); pAcquireSyncs = acquireSyncs_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &setPAcquireKeys(const uint64_t *pAcquireKeys_) VULKAN_HPP_NOEXCEPT { pAcquireKeys = pAcquireKeys_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireKeys_) VULKAN_HPP_NOEXCEPT { acquireCount = static_cast(acquireKeys_.size()); pAcquireKeys = acquireKeys_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds(const uint32_t *pAcquireTimeoutMilliseconds_) VULKAN_HPP_NOEXCEPT { pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &acquireTimeoutMilliseconds_) VULKAN_HPP_NOEXCEPT { acquireCount = static_cast(acquireTimeoutMilliseconds_.size()); pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &setReleaseCount(uint32_t releaseCount_) VULKAN_HPP_NOEXCEPT { releaseCount = releaseCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs(const VULKAN_HPP_NAMESPACE::DeviceMemory *pReleaseSyncs_) VULKAN_HPP_NOEXCEPT { pReleaseSyncs = pReleaseSyncs_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseSyncs_) VULKAN_HPP_NOEXCEPT { releaseCount = static_cast(releaseSyncs_.size()); pReleaseSyncs = releaseSyncs_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &setPReleaseKeys(const uint64_t *pReleaseKeys_) VULKAN_HPP_NOEXCEPT { pReleaseKeys = pReleaseKeys_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &releaseKeys_) VULKAN_HPP_NOEXCEPT { releaseCount = 
static_cast(releaseKeys_.size()); pReleaseKeys = releaseKeys_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Win32KeyedMutexAcquireReleaseInfoNV const &) const = default; # else bool operator==(Win32KeyedMutexAcquireReleaseInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (acquireCount == rhs.acquireCount) && (pAcquireSyncs == rhs.pAcquireSyncs) && (pAcquireKeys == rhs.pAcquireKeys) && (pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds) && (releaseCount == rhs.releaseCount) && (pReleaseSyncs == rhs.pReleaseSyncs) && (pReleaseKeys == rhs.pReleaseKeys); # endif } bool operator!=(Win32KeyedMutexAcquireReleaseInfoNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; const void *pNext = {}; uint32_t acquireCount = {}; const VULKAN_HPP_NAMESPACE::DeviceMemory *pAcquireSyncs = {}; const uint64_t *pAcquireKeys = {}; const uint32_t *pAcquireTimeoutMilliseconds = {}; uint32_t releaseCount = {}; const VULKAN_HPP_NAMESPACE::DeviceMemory *pReleaseSyncs = {}; const uint64_t *pReleaseKeys = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV) == sizeof(VkWin32KeyedMutexAcquireReleaseInfoNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = Win32KeyedMutexAcquireReleaseInfoNV; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined(VK_USE_PLATFORM_WIN32_KHR) struct Win32SurfaceCreateInfoKHR { using NativeType = VkWin32SurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), hinstance(hinstance_), hwnd(hwnd_) { } VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(Win32SurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; Win32SurfaceCreateInfoKHR(VkWin32SurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : Win32SurfaceCreateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Win32SurfaceCreateInfoKHR &operator=(Win32SurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; Win32SurfaceCreateInfoKHR 
&operator=(VkWin32SurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &setHinstance(HINSTANCE hinstance_) VULKAN_HPP_NOEXCEPT { hinstance = hinstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &setHwnd(HWND hwnd_) VULKAN_HPP_NOEXCEPT { hwnd = hwnd_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, hinstance, hwnd); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(Win32SurfaceCreateInfoKHR const &) const = default; # else bool operator==(Win32SurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (hinstance == rhs.hinstance) && (hwnd == rhs.hwnd); # endif } bool operator!=(Win32SurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } # endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {}; HINSTANCE hinstance = {}; HWND hwnd = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR) == sizeof(VkWin32SurfaceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = Win32SurfaceCreateInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ struct WriteDescriptorSet { using NativeType = VkWriteDescriptorSet; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo *pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo *pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView *pTexelBufferView_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dstSet(dstSet_), dstBinding(dstBinding_), dstArrayElement(dstArrayElement_), descriptorCount(descriptorCount_), descriptorType(descriptorType_), pImageInfo(pImageInfo_), pBufferInfo(pBufferInfo_), pTexelBufferView(pTexelBufferView_) { } 
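    // Usage sketch (illustrative comment only, not part of the generated API): building a
    // single uniform-buffer update with this struct. The handles 'device', 'buffer' and
    // 'descriptorSet' are assumed to come from the application; setBufferInfo() is the
    // enhanced-mode setter defined further below, which also fills in descriptorCount.
    //
    //   VULKAN_HPP_NAMESPACE::DescriptorBufferInfo bufferInfo(buffer, 0, VK_WHOLE_SIZE);
    //   VULKAN_HPP_NAMESPACE::WriteDescriptorSet write;
    //   write.setDstSet(descriptorSet)
    //       .setDstBinding(0)
    //       .setDescriptorType(VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer)
    //       .setBufferInfo(bufferInfo);
    //   device.updateDescriptorSets(write, {});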
VULKAN_HPP_CONSTEXPR WriteDescriptorSet(WriteDescriptorSet const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSet(VkWriteDescriptorSet const &rhs) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet(*reinterpret_cast(&rhs)) {} # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &texelBufferView_ = {}, const void *pNext_ = nullptr) : pNext(pNext_) , dstSet(dstSet_) , dstBinding(dstBinding_) , dstArrayElement(dstArrayElement_) , descriptorCount(static_cast(!imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size())) , descriptorType(descriptorType_) , pImageInfo(imageInfo_.data()) , pBufferInfo(bufferInfo_.data()) , pTexelBufferView(texelBufferView_.data()) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT((!imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty()) <= 1); # else if(1 < (!imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty())) { throw LogicError(VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )"); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSet &operator=(WriteDescriptorSet const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSet &operator=(VkWriteDescriptorSet const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setDstSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_) VULKAN_HPP_NOEXCEPT { dstSet = dstSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setDstBinding(uint32_t dstBinding_) VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setDstArrayElement(uint32_t dstArrayElement_) VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setDescriptorCount(uint32_t descriptorCount_) VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setDescriptorType(VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_) VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setPImageInfo(const VULKAN_HPP_NAMESPACE::DescriptorImageInfo *pImageInfo_) VULKAN_HPP_NOEXCEPT { pImageInfo = pImageInfo_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSet & setImageInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &imageInfo_) VULKAN_HPP_NOEXCEPT { descriptorCount = static_cast(imageInfo_.size()); pImageInfo = imageInfo_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setPBufferInfo(const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo *pBufferInfo_) VULKAN_HPP_NOEXCEPT { pBufferInfo = pBufferInfo_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSet & 
setBufferInfo(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &bufferInfo_) VULKAN_HPP_NOEXCEPT { descriptorCount = static_cast(bufferInfo_.size()); pBufferInfo = bufferInfo_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &setPTexelBufferView(const VULKAN_HPP_NAMESPACE::BufferView *pTexelBufferView_) VULKAN_HPP_NOEXCEPT { pTexelBufferView = pTexelBufferView_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSet & setTexelBufferView(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &texelBufferView_) VULKAN_HPP_NOEXCEPT { descriptorCount = static_cast(texelBufferView_.size()); pTexelBufferView = texelBufferView_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(WriteDescriptorSet const &) const = default; #else bool operator==(WriteDescriptorSet const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dstSet == rhs.dstSet) && (dstBinding == rhs.dstBinding) && (dstArrayElement == rhs.dstArrayElement) && (descriptorCount == rhs.descriptorCount) && (descriptorType == rhs.descriptorType) && (pImageInfo == rhs.pImageInfo) && (pBufferInfo == rhs.pBufferInfo) && (pTexelBufferView == rhs.pTexelBufferView); # endif } bool operator!=(WriteDescriptorSet const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; const void *pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; uint32_t dstBinding = {}; uint32_t dstArrayElement = {}; uint32_t descriptorCount = {}; VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; const VULKAN_HPP_NAMESPACE::DescriptorImageInfo *pImageInfo = {}; const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo *pBufferInfo = {}; const VULKAN_HPP_NAMESPACE::BufferView *pTexelBufferView = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::WriteDescriptorSet) == sizeof(VkWriteDescriptorSet), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "WriteDescriptorSet is not nothrow_move_constructible!"); template<> struct CppType { using Type = WriteDescriptorSet; }; struct WriteDescriptorSetAccelerationStructureKHR { using NativeType = VkWriteDescriptorSetAccelerationStructureKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), accelerationStructureCount(accelerationStructureCount_), pAccelerationStructures(pAccelerationStructures_) { } VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(WriteDescriptorSetAccelerationStructureKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetAccelerationStructureKHR(VkWriteDescriptorSetAccelerationStructureKHR const &rhs) VULKAN_HPP_NOEXCEPT : WriteDescriptorSetAccelerationStructureKHR(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &accelerationStructures_, const void *pNext_ = nullptr) : pNext(pNext_) , accelerationStructureCount(static_cast(accelerationStructures_.size())) , pAccelerationStructures(accelerationStructures_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSetAccelerationStructureKHR &operator=(WriteDescriptorSetAccelerationStructureKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetAccelerationStructureKHR &operator=(VkWriteDescriptorSetAccelerationStructureKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount(uint32_t accelerationStructureCount_) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures(const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures_) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSetAccelerationStructureKHR &setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &accelerationStructures_) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = static_cast(accelerationStructures_.size()); pAccelerationStructures = accelerationStructures_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, accelerationStructureCount, pAccelerationStructures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(WriteDescriptorSetAccelerationStructureKHR const &) const = default; #else bool operator==(WriteDescriptorSetAccelerationStructureKHR const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (accelerationStructureCount == rhs.accelerationStructureCount) && (pAccelerationStructures == rhs.pAccelerationStructures); # endif } bool 
operator!=(WriteDescriptorSetAccelerationStructureKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; const void *pNext = {}; uint32_t accelerationStructureCount = {}; const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR) == sizeof(VkWriteDescriptorSetAccelerationStructureKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = WriteDescriptorSetAccelerationStructureKHR; }; struct WriteDescriptorSetAccelerationStructureNV { using NativeType = VkWriteDescriptorSetAccelerationStructureNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructures_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), accelerationStructureCount(accelerationStructureCount_), pAccelerationStructures(pAccelerationStructures_) { } VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(WriteDescriptorSetAccelerationStructureNV const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetAccelerationStructureNV(VkWriteDescriptorSetAccelerationStructureNV const &rhs) VULKAN_HPP_NOEXCEPT : WriteDescriptorSetAccelerationStructureNV(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSetAccelerationStructureNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &accelerationStructures_, const void *pNext_ = nullptr) : pNext(pNext_) , accelerationStructureCount(static_cast(accelerationStructures_.size())) , pAccelerationStructures(accelerationStructures_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSetAccelerationStructureNV &operator=(WriteDescriptorSetAccelerationStructureNV const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetAccelerationStructureNV &operator=(VkWriteDescriptorSetAccelerationStructureNV const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount(uint32_t accelerationStructureCount_) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures(const VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructures_) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) WriteDescriptorSetAccelerationStructureNV &setAccelerationStructures( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &accelerationStructures_) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = static_cast(accelerationStructures_.size()); pAccelerationStructures = accelerationStructures_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std:: tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, accelerationStructureCount, pAccelerationStructures); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(WriteDescriptorSetAccelerationStructureNV const &) const = default; #else bool operator==(WriteDescriptorSetAccelerationStructureNV const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (accelerationStructureCount == rhs.accelerationStructureCount) && (pAccelerationStructures == rhs.pAccelerationStructures); # endif } bool operator!=(WriteDescriptorSetAccelerationStructureNV const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV; const void *pNext = {}; uint32_t accelerationStructureCount = {}; const VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructures = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV) == sizeof(VkWriteDescriptorSetAccelerationStructureNV), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!"); template<> struct CppType { using Type = WriteDescriptorSetAccelerationStructureNV; }; struct WriteDescriptorSetInlineUniformBlock { using NativeType = VkWriteDescriptorSetInlineUniformBlock; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock; #if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock(uint32_t dataSize_ = {}, const void *pData_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), dataSize(dataSize_), pData(pData_) { } VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock(WriteDescriptorSetInlineUniformBlock const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetInlineUniformBlock(VkWriteDescriptorSetInlineUniformBlock const &rhs) VULKAN_HPP_NOEXCEPT : WriteDescriptorSetInlineUniformBlock(*reinterpret_cast(&rhs)) { } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template WriteDescriptorSetInlineUniformBlock(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_, const void *pNext_ = nullptr) : pNext(pNext_) , dataSize(static_cast(data_.size() * sizeof(T))) , pData(data_.data()) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSetInlineUniformBlock 
&operator=(WriteDescriptorSetInlineUniformBlock const &rhs) VULKAN_HPP_NOEXCEPT = default; WriteDescriptorSetInlineUniformBlock &operator=(VkWriteDescriptorSetInlineUniformBlock const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } #if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock &setDataSize(uint32_t dataSize_) VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock &setPData(const void *pData_) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } # if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) template WriteDescriptorSetInlineUniformBlock &setData(VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const &data_) VULKAN_HPP_NOEXCEPT { dataSize = static_cast(data_.size() * sizeof(T)); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } #if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, dataSize, pData); } #endif #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) auto operator<=>(WriteDescriptorSetInlineUniformBlock const &) const = default; #else bool operator==(WriteDescriptorSetInlineUniformBlock const &rhs) const VULKAN_HPP_NOEXCEPT { # if defined(VULKAN_HPP_USE_REFLECT) return this->reflect() == rhs.reflect(); # else return (sType == rhs.sType) && (pNext == rhs.pNext) && (dataSize == rhs.dataSize) && (pData == rhs.pData); # endif } bool operator!=(WriteDescriptorSetInlineUniformBlock const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } #endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock; const void *pNext = {}; uint32_t dataSize = {}; const void *pData = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock) == sizeof(VkWriteDescriptorSetInlineUniformBlock), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!"); template<> struct CppType { using Type = WriteDescriptorSetInlineUniformBlock; }; using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; #if defined(VK_USE_PLATFORM_XCB_KHR) struct XcbSurfaceCreateInfoKHR { using NativeType = VkXcbSurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t *connection_ = {}, xcb_window_t window_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), connection(connection_), window(window_) { } VULKAN_HPP_CONSTEXPR 
XcbSurfaceCreateInfoKHR(XcbSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; XcbSurfaceCreateInfoKHR(VkXcbSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : XcbSurfaceCreateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ XcbSurfaceCreateInfoKHR &operator=(XcbSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; XcbSurfaceCreateInfoKHR &operator=(VkXcbSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &setConnection(xcb_connection_t *connection_) VULKAN_HPP_NOEXCEPT { connection = connection_; return *this; } VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &setWindow(xcb_window_t window_) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, connection, window); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(XcbSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = connection <=> rhs.connection; cmp != 0) return cmp; if(auto cmp = memcmp(&window, &rhs.window, sizeof(xcb_window_t)); cmp != 0) return (cmp < 0) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(XcbSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (connection == rhs.connection) && (memcmp(&window, &rhs.window, sizeof(xcb_window_t)) == 0); } bool operator!=(XcbSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {}; xcb_connection_t *connection = {}; xcb_window_t window = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR) == sizeof(VkXcbSurfaceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = XcbSurfaceCreateInfoKHR; }; #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined(VK_USE_PLATFORM_XLIB_KHR) struct XlibSurfaceCreateInfoKHR { using NativeType = VkXlibSurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR; # if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS) VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display *dpy_ = {}, Window window_ = {}, const void *pNext_ = nullptr) VULKAN_HPP_NOEXCEPT : pNext(pNext_), flags(flags_), dpy(dpy_), window(window_) { } VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(XlibSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; XlibSurfaceCreateInfoKHR(VkXlibSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT : XlibSurfaceCreateInfoKHR(*reinterpret_cast(&rhs)) { } # endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ XlibSurfaceCreateInfoKHR &operator=(XlibSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT = default; XlibSurfaceCreateInfoKHR &operator=(VkXlibSurfaceCreateInfoKHR const &rhs) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast(&rhs); return *this; } # if !defined(VULKAN_HPP_NO_STRUCT_SETTERS) VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &setPNext(const void *pNext_) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &setFlags(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &setDpy(Display *dpy_) VULKAN_HPP_NOEXCEPT { dpy = dpy_; return *this; } VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &setWindow(Window window_) VULKAN_HPP_NOEXCEPT { window = window_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ explicit operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } explicit operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast(this); } # if defined(VULKAN_HPP_USE_REFLECT) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie(sType, pNext, flags, dpy, window); } # endif # if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) std::strong_ordering operator<=>(XlibSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { 
if(auto cmp = sType <=> rhs.sType; cmp != 0) return cmp; if(auto cmp = pNext <=> rhs.pNext; cmp != 0) return cmp; if(auto cmp = flags <=> rhs.flags; cmp != 0) return cmp; if(auto cmp = dpy <=> rhs.dpy; cmp != 0) return cmp; if(auto cmp = memcmp(&window, &rhs.window, sizeof(Window)); cmp != 0) return (cmp < 0) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==(XlibSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return (sType == rhs.sType) && (pNext == rhs.pNext) && (flags == rhs.flags) && (dpy == rhs.dpy) && (memcmp(&window, &rhs.window, sizeof(Window)) == 0); } bool operator!=(XlibSurfaceCreateInfoKHR const &rhs) const VULKAN_HPP_NOEXCEPT { return !operator==(rhs); } public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; const void *pNext = {}; VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; Display *dpy = {}; Window window = {}; }; VULKAN_HPP_STATIC_ASSERT(sizeof(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR) == sizeof(VkXlibSurfaceCreateInfoKHR), "struct and wrapper have different size!"); VULKAN_HPP_STATIC_ASSERT(std::is_standard_layout::value, "struct wrapper is not a standard layout!"); VULKAN_HPP_STATIC_ASSERT(std::is_nothrow_move_constructible::value, "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!"); template<> struct CppType { using Type = XlibSurfaceCreateInfoKHR; }; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ } // namespace VULKAN_HPP_NAMESPACE #endif
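// Usage sketch (illustrative comment only, not part of the generated header): creating a
// presentation surface from one of the platform-guarded create-info structs above. The
// handles 'instance', 'display' and 'window' are assumed to be provided by the application;
// createXlibSurfaceKHR is the enhanced-mode member of VULKAN_HPP_NAMESPACE::Instance.
//
//   #if defined(VK_USE_PLATFORM_XLIB_KHR)
//     VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR surfaceInfo({}, display, window);
//     VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createXlibSurfaceKHR(surfaceInfo);
//   #endif /*VK_USE_PLATFORM_XLIB_KHR*/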