// Copyright 2015-2023 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_FUNCS_HPP
#  define VULKAN_FUNCS_HPP

namespace VULKAN_HPP_NAMESPACE
{

  //===========================
  //=== COMMAND Definitions ===
  //===========================

  //=== VK_VERSION_1_0 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Instance * pInstance, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkInstance *>( pInstance ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Instance instance;
    VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Instance instance;
    VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
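
  // Illustrative usage sketch (not part of the generated API): creating an instance with the
  // enhanced-mode wrapper, assuming exceptions and the default dispatcher are enabled. The
  // application and engine names are placeholders.
  //
  //   vk::ApplicationInfo    appInfo( "ExampleApp", 1, "ExampleEngine", 1, VK_API_VERSION_1_0 );
  //   vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
  //   vk::Instance           instance = vk::createInstance( instanceCreateInfo );  // throws vk::SystemError on failure
  //   // ... use the instance ...
  //   instance.destroy();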

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
    uint32_t physicalDeviceCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
      {
        physicalDevices.resize( physicalDeviceCount );
        result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
    if ( physicalDeviceCount < physicalDevices.size() )
    {
      physicalDevices.resize( physicalDeviceCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
  }

  template <typename PhysicalDeviceAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
    uint32_t physicalDeviceCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
      {
        physicalDevices.resize( physicalDeviceCount );
        result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
    if ( physicalDeviceCount < physicalDevices.size() )
    {
      physicalDevices.resize( physicalDeviceCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
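
  // Illustrative usage sketch (assumes exceptions and the default dispatcher): enumerating the
  // physical devices of an existing vk::Instance named 'instance'.
  //
  //   std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
  //   for ( vk::PhysicalDevice const & pd : physicalDevices )
  //   {
  //     vk::PhysicalDeviceProperties props = pd.getProperties();
  //     // inspect props.deviceName, props.apiVersion, props.limits, ...
  //   }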

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );

    return features;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );

    return formatProperties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );

    return properties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  template <typename QueueFamilyPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );

    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
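
  // Illustrative usage sketch: selecting a queue family that supports graphics on a placeholder
  // handle 'physicalDevice'.
  //
  //   std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsQueueFamilyIndex = 0;
  //   for ( uint32_t i = 0; i < queueFamilies.size(); ++i )
  //   {
  //     if ( queueFamilies[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsQueueFamilyIndex = i;
  //       break;
  //     }
  //   }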

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );

    return memoryProperties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetInstanceProcAddr( m_instance, pName );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );

    return result;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeviceProcAddr( m_device, pName );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );

    return result;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Device * pDevice, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDevice *>( pDevice ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Device device;
    VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Device device;
    VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
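
  // Illustrative usage sketch: creating a logical device with one queue from the family selected
  // above; 'graphicsQueueFamilyIndex' and 'physicalDevice' are placeholders.
  //
  //   float                     queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueCreateInfo( {}, graphicsQueueFamilyIndex, 1, &queuePriority );
  //   vk::DeviceCreateInfo      deviceCreateInfo( {}, queueCreateInfo );
  //   vk::Device                device = physicalDevice.createDevice( deviceCreateInfo );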

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ExtensionPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
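
  // Illustrative usage sketch: checking whether an instance extension (here VK_KHR_surface) is
  // available before requesting it in InstanceCreateInfo.
  //
  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
  //   bool hasSurface = std::any_of( extensions.begin(), extensions.end(),
  //                                  []( vk::ExtensionProperties const & ep )
  //                                  { return strcmp( ep.extensionName, VK_KHR_SURFACE_EXTENSION_NAME ) == 0; } );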

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ExtensionPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename LayerPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
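
  // Illustrative usage sketch: probing for the Khronos validation layer before enabling it.
  //
  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  //   bool hasValidation = std::any_of( layers.begin(), layers.end(),
  //                                     []( vk::LayerProperties const & lp )
  //                                     { return strcmp( lp.layerName, "VK_LAYER_KHRONOS_validation" ) == 0; } );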

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename LayerPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );

    return queue;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
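
  // Illustrative usage sketch: submitting a recorded command buffer and waiting on a fence;
  // 'device', 'queue' and 'commandBuffer' are placeholder handles.
  //
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
  //   vk::SubmitInfo submitInfo;
  //   submitInfo.commandBufferCount = 1;
  //   submitInfo.pCommandBuffers    = &commandBuffer;
  //   queue.submit( submitInfo, fence );
  //   vk::Result waitResult = device.waitForFences( fence, VK_TRUE, UINT64_MAX );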

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkQueueWaitIdle( m_queue );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkDeviceWaitIdle( m_device );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
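
  // Illustrative usage sketch: allocating and binding memory for a placeholder 'buffer'; the
  // memoryTypeIndex selection against getMemoryProperties() is omitted for brevity.
  //
  //   vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
  //   vk::MemoryAllocateInfo allocateInfo( requirements.size, memoryTypeIndex );
  //   vk::DeviceMemory       memory       = device.allocateMemory( allocateInfo );
  //   device.bindBufferMemory( buffer, memory, 0 );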

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    void * pData;
    VkResult result = d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
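
  // Illustrative usage sketch: mapping the allocation above and copying application data into it;
  // 'srcData' and 'dataSize' are placeholders.
  //
  //   void * mapped = device.mapMemory( memory, 0, requirements.size );
  //   memcpy( mapped, srcData, static_cast<size_t>( dataSize ) );
  //   device.unmapMemory( memory );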

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
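
  // Illustrative usage sketch: flushing a mapped range of non-coherent memory so the writes become
  // visible to the device.
  //
  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  //   device.flushMappedMemoryRanges( range );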
845 
846 
847   template <typename Dispatch>
invalidateMappedMemoryRanges(uint32_t memoryRangeCount,const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,Dispatch const & d) const848   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
849   {
850     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
851     return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
852   }
853 
854 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
855   template <typename Dispatch>
invalidateMappedMemoryRanges(VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,Dispatch const & d) const856   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
857   {
858     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
859 
860 
861 
862     VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
863     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
864 
865     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
866   }
867 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
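
  // For memory types without HostCoherent, host writes must be flushed and device writes
  // must be invalidated explicitly. A minimal sketch (illustrative only; `device` and
  // `memory` are assumed to exist in the caller's code):
  //
  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  //   device.flushMappedMemoryRanges( range );       // make host writes visible to the device
  //   device.invalidateMappedMemoryRanges( range );  // make device writes visible to the host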

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );

    return committedMemoryInBytes;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );

    return memoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );

    return memoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
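
  // Typical flow: query the requirements, allocate matching memory, then bind before first use.
  // A minimal sketch (illustrative only; `device`, `buffer`, and a suitable `memory` allocation
  // are assumed to exist in the caller's code):
  //
  //   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
  //   // ... allocate `memory` of at least reqs.size from a type allowed by reqs.memoryTypeBits ...
  //   device.bindBufferMemory( buffer, memory, 0 );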

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
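
  // The enhanced waitForFences overload returns the Result instead of throwing on eTimeout,
  // so callers can poll. A minimal sketch (illustrative only; `device` and `fence` are
  // assumed to exist in the caller's code):
  //
  //   if ( device.waitForFences( fence, VK_TRUE, 1000000 /* ns */ ) == vk::Result::eTimeout )
  //   {
  //     // not signaled yet; do other work and try again
  //   }
  //   device.resetFences( fence );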

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Event * pEvent, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkEvent *>( pEvent ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Event event;
    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Event event;
    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
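
  // getEventStatus reports state through its Result rather than throwing: eEventSet and
  // eEventReset are both success codes. A minimal sketch (illustrative only; `device` and
  // `event` are assumed to exist in the caller's code):
  //
  //   bool signaled = ( device.getEventStatus( event ) == vk::Result::eEventSet );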

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );

    return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    DataType data;
    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( DataType ), reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );

    return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
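
  // The templated overloads return both the Result (eSuccess or eNotReady) and the data.
  // A minimal sketch reading two 64-bit timestamps (illustrative only; `device` and
  // `queryPool` are assumed to exist in the caller's code):
  //
  //   auto [ result, timestamps ] = device.getQueryPoolResults<uint64_t>(
  //     queryPool, 0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ),
  //     vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );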

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Buffer * pBuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBuffer *>( pBuffer ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Buffer buffer;
    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Buffer buffer;
    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
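
  // The ...Unique variants wrap the created handle in a UniqueHandle that destroys it
  // automatically, so no matching destroyBuffer call is needed. A minimal sketch
  // (illustrative only; `device` is assumed to exist in the caller's code):
  //
  //   vk::UniqueBuffer staging = device.createBufferUnique(
  //     vk::BufferCreateInfo( {}, 65536, vk::BufferUsageFlagBits::eTransferSrc ) );
  //   // `staging` releases the VkBuffer when it goes out of scope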
1485 
1486 
1487   template <typename Dispatch>
destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1488   VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
1489   {
1490     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1491     d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1492   }
1493 
1494 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1495   template <typename Dispatch>
destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1496   VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1497   {
1498     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1499 
1500 
1501 
1502     d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1503 
1504 
1505 
1506   }
1507 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1508 
1509 
1510   template <typename Dispatch>
destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1511   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
1512   {
1513     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1514     d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1515   }
1516 
1517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1518   template <typename Dispatch>
destroy(VULKAN_HPP_NAMESPACE::Buffer buffer,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1519   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1520   {
1521     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1522 
1523 
1524 
1525     d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1526 
1527 
1528 
1529   }
1530 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1531 
1532 
1533   template <typename Dispatch>
createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::BufferView * pView,Dispatch const & d) const1534   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferView * pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
1535   {
1536     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1537     return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferView *>( pView ) ) );
1538   }
1539 
1540 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1541   template <typename Dispatch>
createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1542   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1543   {
1544     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1545 
1546 
1547     VULKAN_HPP_NAMESPACE::BufferView view;
1548     VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) );
1549     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
1550 
1551     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
1552   }
1553 
1554 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
1555   template <typename Dispatch>
createBufferViewUnique(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1556   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1557   {
1558     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1559 
1560 
1561     VULKAN_HPP_NAMESPACE::BufferView view;
1562     VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) );
1563     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
1564 
1565     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
1566   }
1567 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
1568 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1569 
1570 
1571   template <typename Dispatch>
destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1572   VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
1573   {
1574     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1575     d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1576   }
1577 
1578 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1579   template <typename Dispatch>
destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1580   VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1581   {
1582     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1583 
1584 
1585 
1586     d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1587 
1588 
1589 
1590   }
1591 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1592 
1593 
1594   template <typename Dispatch>
destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,Dispatch const & d) const1595   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
1596   {
1597     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1598     d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1599   }
1600 
1601 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1602   template <typename Dispatch>
destroy(VULKAN_HPP_NAMESPACE::BufferView bufferView,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const1603   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1604   {
1605     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1606 
1607 
1608 
1609     d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1610 
1611 
1612 
1613   }
1614 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1615 
1616 
1617   template <typename Dispatch>
createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Image * pImage,Dispatch const & d) const1618   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Image * pImage, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
1619   {
1620     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1621     return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImage *>( pImage ) ) );
1622   }
1623 
1624 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1625   template <typename Dispatch>
1626   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1627   {
1628     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1629 
1630 
1631     VULKAN_HPP_NAMESPACE::Image image;
1632     VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) );
1633     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
1634 
1635     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
1636   }
1637 
1638 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
1639   template <typename Dispatch>
1640   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1641   {
1642     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1643 
1644 
1645     VULKAN_HPP_NAMESPACE::Image image;
1646     VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) );
1647     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
1648 
1649     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
1650   }
1651 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
1652 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
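
  // Illustrative usage sketch (added commentary, not part of the generated API): in enhanced mode,
  // createImage() throws on failure (or reports through a ResultValue type when exceptions are disabled),
  // while createImageUnique() wraps the handle so it is destroyed automatically.
  // The names 'device' and 'imageCreateInfo' are assumed to exist in the calling code, using the
  // default 'vk' namespace alias.
  //
  //   vk::ImageCreateInfo imageCreateInfo{ /* filled in by the application */ };
  //   vk::Image       image  = device.createImage( imageCreateInfo );        // destroy manually with device.destroyImage()
  //   vk::UniqueImage unique = device.createImageUnique( imageCreateInfo );  // destroyed when 'unique' goes out of scope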
1653 
1654 
1655   template <typename Dispatch>
1656   VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1657   {
1658     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1659     d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1660   }
1661 
1662 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1663   template <typename Dispatch>
1664   VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1665   {
1666     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1667 
1668 
1669 
1670     d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1671 
1672 
1673 
1674   }
1675 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1676 
1677 
1678   template <typename Dispatch>
1679   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1680   {
1681     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1682     d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1683   }
1684 
1685 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1686   template <typename Dispatch>
1687   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1688   {
1689     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1690 
1691 
1692 
1693     d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1694 
1695 
1696 
1697   }
1698 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1699 
1700 
1701   template <typename Dispatch>
1702   VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1703   {
1704     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1705     d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
1706   }
1707 
1708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1709   template <typename Dispatch>
1710   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1711   {
1712     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1713 
1714 
1715     VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
1716     d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) );
1717 
1718 
1719     return layout;
1720   }
1721 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
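
  // Illustrative usage sketch (added commentary, not part of the generated API): in enhanced mode the
  // subresource layout is returned by value instead of being written through a pointer parameter.
  // 'device' and 'image' are assumed to exist in the calling code.
  //
  //   vk::SubresourceLayout layout =
  //     device.getImageSubresourceLayout( image, vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } );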
1722 
1723 
1724   template <typename Dispatch>
1725   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ImageView * pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1726   {
1727     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1728     return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImageView *>( pView ) ) );
1729   }
1730 
1731 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1732   template <typename Dispatch>
1733   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1734   {
1735     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1736 
1737 
1738     VULKAN_HPP_NAMESPACE::ImageView view;
1739     VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) );
1740     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
1741 
1742     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
1743   }
1744 
1745 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
1746   template <typename Dispatch>
1747   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1748   {
1749     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1750 
1751 
1752     VULKAN_HPP_NAMESPACE::ImageView view;
1753     VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) );
1754     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
1755 
1756     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
1757   }
1758 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
1759 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1760 
1761 
1762   template <typename Dispatch>
1763   VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1764   {
1765     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1766     d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1767   }
1768 
1769 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1770   template <typename Dispatch>
1771   VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1772   {
1773     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1774 
1775 
1776 
1777     d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1778 
1779 
1780 
1781   }
1782 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1783 
1784 
1785   template <typename Dispatch>
1786   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1787   {
1788     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1789     d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1790   }
1791 
1792 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1793   template <typename Dispatch>
1794   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1795   {
1796     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1797 
1798 
1799 
1800     d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1801 
1802 
1803 
1804   }
1805 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1806 
1807 
1808   template <typename Dispatch>
1809   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1810   {
1811     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1812     return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
1813   }
1814 
1815 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1816   template <typename Dispatch>
1817   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1818   {
1819     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1820 
1821 
1822     VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
1823     VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
1824     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
1825 
1826     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
1827   }
1828 
1829 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
1830   template <typename Dispatch>
1831   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1832   {
1833     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1834 
1835 
1836     VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
1837     VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
1838     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
1839 
1840     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
1841   }
1842 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
1843 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1844 
1845 
1846   template <typename Dispatch>
1847   VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1848   {
1849     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1850     d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1851   }
1852 
1853 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1854   template <typename Dispatch>
1855   VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1856   {
1857     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1858 
1859 
1860 
1861     d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1862 
1863 
1864 
1865   }
1866 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1867 
1868 
1869   template <typename Dispatch>
1870   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1871   {
1872     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1873     d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1874   }
1875 
1876 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1877   template <typename Dispatch>
1878   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1879   {
1880     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1881 
1882 
1883 
1884     d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1885 
1886 
1887 
1888   }
1889 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
1890 
1891 
1892   template <typename Dispatch>
1893   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1894   {
1895     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1896     return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
1897   }
1898 
1899 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1900   template <typename PipelineAllocator, typename Dispatch>
1901   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1902   {
1903     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1904 
1905 
1906     std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
1907     VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
1908     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
1909 
1910     return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
1911   }
1912 
1913   template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
1914   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
1915   {
1916     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1917 
1918 
1919     std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
1920     VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
1921     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
1922 
1923     return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
1924   }
1925 
1926   template <typename Dispatch>
1927   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1928   {
1929     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1930 
1931 
1932     VULKAN_HPP_NAMESPACE::Pipeline pipeline;
1933     VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
1934     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
1935 
1936     return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
1937   }
1938 
1939 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
1940   template <typename Dispatch, typename PipelineAllocator>
1941   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1942   {
1943     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1944 
1945 
1946     std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
1947     VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
1948     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
1949     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
1950     uniquePipelines.reserve( createInfos.size() );
1951     ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
1952     for ( auto const & pipeline : pipelines )
1953     {
1954       uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
1955     }
1956     return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
1957   }
1958 
1959   template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
1960   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
1961   {
1962     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1963 
1964 
1965     std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
1966     VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
1967     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
1968     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
1969     uniquePipelines.reserve( createInfos.size() );
1970     ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
1971     for ( auto const & pipeline : pipelines )
1972     {
1973       uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
1974     }
1975     return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
1976   }
1977 
1978   template <typename Dispatch>
1979   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
1980   {
1981     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1982 
1983 
1984     VULKAN_HPP_NAMESPACE::Pipeline pipeline;
1985     VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
1986     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
1987 
1988     return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
1989   }
1990 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
1991 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
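
  // Illustrative usage sketch (added commentary, not part of the generated API): pipeline creation
  // returns ResultValue rather than throwing on every non-eSuccess code, because
  // ePipelineCompileRequiredEXT counts as a valid outcome that the caller must inspect.
  // 'device', 'pipelineCache' and 'graphicsPipelineCreateInfo' are assumed to exist in the calling code.
  //
  //   vk::ResultValue<vk::Pipeline> rv = device.createGraphicsPipeline( pipelineCache, graphicsPipelineCreateInfo );
  //   if ( rv.result == vk::Result::eSuccess )
  //   {
  //     vk::Pipeline pipeline = rv.value;
  //     // ... use pipeline, then release it with device.destroyPipeline( pipeline ) ...
  //   }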
1992 
1993 
1994   template <typename Dispatch>
1995   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
1996   {
1997     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
1998     return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
1999   }
2000 
2001 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2002   template <typename PipelineAllocator, typename Dispatch>
2003   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2004   {
2005     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2006 
2007 
2008     std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
2009     VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2010     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
2011 
2012     return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
2013   }
2014 
2015   template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
2016   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
2017   {
2018     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2019 
2020 
2021     std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
2022     VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2023     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
2024 
2025     return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
2026   }
2027 
2028   template <typename Dispatch>
2029   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2030   {
2031     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2032 
2033 
2034     VULKAN_HPP_NAMESPACE::Pipeline pipeline;
2035     VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
2036     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
2037 
2038     return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
2039   }
2040 
2041 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2042   template <typename Dispatch, typename PipelineAllocator>
2043   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2044   {
2045     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2046 
2047 
2048     std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
2049     VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2050     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
2051     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
2052     uniquePipelines.reserve( createInfos.size() );
2053     ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
2054     for ( auto const & pipeline : pipelines )
2055     {
2056       uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
2057     }
2058     return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
2059   }
2060 
2061   template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
2062   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
2063   {
2064     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2065 
2066 
2067     std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
2068     VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
2069     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
2070     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
2071     uniquePipelines.reserve( createInfos.size() );
2072     ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
2073     for ( auto const & pipeline : pipelines )
2074     {
2075       uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
2076     }
2077     return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
2078   }
2079 
2080   template <typename Dispatch>
2081   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2082   {
2083     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2084 
2085 
2086     VULKAN_HPP_NAMESPACE::Pipeline pipeline;
2087     VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
2088     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
2089 
2090     return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2091   }
2092 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2093 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
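
  // Illustrative usage sketch (added commentary, not part of the generated API): the compute variants
  // mirror the graphics ones; createComputePipelinesUnique() returns one UniqueHandle per create info.
  // 'device', 'pipelineCache' and 'computePipelineCreateInfos' (a std::vector of create infos) are
  // assumed to exist in the calling code.
  //
  //   auto rv = device.createComputePipelinesUnique( pipelineCache, computePipelineCreateInfos );
  //   // rv.result may be eSuccess or ePipelineCompileRequiredEXT; rv.value holds the unique handles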
2094 
2095 
2096   template <typename Dispatch>
2097   VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2098   {
2099     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2100     d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2101   }
2102 
2103 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2104   template <typename Dispatch>
2105   VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2106   {
2107     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2108 
2109 
2110 
2111     d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2112 
2113 
2114 
2115   }
2116 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2117 
2118 
2119   template <typename Dispatch>
2120   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2121   {
2122     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2123     d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2124   }
2125 
2126 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2127   template <typename Dispatch>
2128   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2129   {
2130     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2131 
2132 
2133 
2134     d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2135 
2136 
2137 
2138   }
2139 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2140 
2141 
2142   template <typename Dispatch>
2143   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2144   {
2145     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2146     return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
2147   }
2148 
2149 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2150   template <typename Dispatch>
2151   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2152   {
2153     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2154 
2155 
2156     VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
2157     VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
2158     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
2159 
2160     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
2161   }
2162 
2163 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2164   template <typename Dispatch>
2165   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2166   {
2167     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2168 
2169 
2170     VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
2171     VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
2172     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
2173 
2174     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2175   }
2176 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2177 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2178 
2179 
2180   template <typename Dispatch>
2181   VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2182   {
2183     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2184     d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2185   }
2186 
2187 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2188   template <typename Dispatch>
2189   VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2190   {
2191     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2192 
2193 
2194 
2195     d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2196 
2197 
2198 
2199   }
2200 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2201 
2202 
2203   template <typename Dispatch>
2204   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2205   {
2206     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2207     d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2208   }
2209 
2210 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2211   template <typename Dispatch>
2212   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2213   {
2214     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2215 
2216 
2217 
2218     d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2219 
2220 
2221 
2222   }
2223 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2224 
2225 
2226   template <typename Dispatch>
2227   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2228   {
2229     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2230     return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSampler *>( pSampler ) ) );
2231   }
2232 
2233 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2234   template <typename Dispatch>
2235   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2236   {
2237     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2238 
2239 
2240     VULKAN_HPP_NAMESPACE::Sampler sampler;
2241     VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) );
2242     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
2243 
2244     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
2245   }
2246 
2247 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2248   template <typename Dispatch>
2249   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2250   {
2251     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2252 
2253 
2254     VULKAN_HPP_NAMESPACE::Sampler sampler;
2255     VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) );
2256     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
2257 
2258     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2259   }
2260 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2261 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
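
  // Illustrative sketch, not generated from the registry: one way to call the enhanced-mode sampler
  // creation overloads defined above, assuming the default dispatcher, exceptions enabled, and a valid
  // VULKAN_HPP_NAMESPACE::Device named `device`; the create-info values are placeholders for the example.
  //
  //   VULKAN_HPP_NAMESPACE::SamplerCreateInfo samplerCreateInfo{};
  //   samplerCreateInfo.magFilter = VULKAN_HPP_NAMESPACE::Filter::eLinear;
  //   samplerCreateInfo.minFilter = VULKAN_HPP_NAMESPACE::Filter::eLinear;
  //   VULKAN_HPP_NAMESPACE::Sampler       sampler       = device.createSampler( samplerCreateInfo );
  //   VULKAN_HPP_NAMESPACE::UniqueSampler uniqueSampler = device.createSamplerUnique( samplerCreateInfo );
  //   // uniqueSampler releases its VkSampler through ObjectDestroy<Device> when it goes out of scope;
  //   // the plain `sampler` must be released explicitly, e.g. device.destroySampler( sampler ).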
2262 
2263 
2264   template <typename Dispatch>
2265   VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2266   {
2267     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2268     d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2269   }
2270 
2271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2272   template <typename Dispatch>
2273   VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2274   {
2275     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2276 
2277 
2278 
2279     d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2280 
2281 
2282 
2283   }
2284 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2285 
2286 
2287   template <typename Dispatch>
2288   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2289   {
2290     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2291     d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2292   }
2293 
2294 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2295   template <typename Dispatch>
2296   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2297   {
2298     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2299 
2300 
2301 
2302     d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2303 
2304 
2305 
2306   }
2307 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2308 
2309 
2310   template <typename Dispatch>
2311   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2312   {
2313     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2314     return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
2315   }
2316 
2317 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2318   template <typename Dispatch>
2319   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2320   {
2321     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2322 
2323 
2324     VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
2325     VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
2326     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
2327 
2328     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
2329   }
2330 
2331 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2332   template <typename Dispatch>
2333   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2334   {
2335     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2336 
2337 
2338     VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
2339     VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
2340     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
2341 
2342     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2343   }
2344 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2345 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2346 
2347 
2348   template <typename Dispatch>
2349   VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2350   {
2351     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2352     d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2353   }
2354 
2355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2356   template <typename Dispatch>
2357   VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2358   {
2359     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2360 
2361 
2362 
2363     d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2364 
2365 
2366 
2367   }
2368 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2369 
2370 
2371   template <typename Dispatch>
2372   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2373   {
2374     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2375     d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2376   }
2377 
2378 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2379   template <typename Dispatch>
2380   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2381   {
2382     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2383 
2384 
2385 
2386     d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2387 
2388 
2389 
2390   }
2391 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2392 
2393 
2394   template <typename Dispatch>
2395   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2396   {
2397     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2398     return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
2399   }
2400 
2401 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2402   template <typename Dispatch>
2403   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2404   {
2405     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2406 
2407 
2408     VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
2409     VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
2410     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
2411 
2412     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
2413   }
2414 
2415 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2416   template <typename Dispatch>
2417   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2418   {
2419     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2420 
2421 
2422     VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
2423     VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
2424     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
2425 
2426     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2427   }
2428 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2429 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2430 
2431 
2432 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
2433   template <typename Dispatch>
2434   VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2435   {
2436     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2437     return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
2438   }
2439 #else
2440   template <typename Dispatch>
2441   VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2442   {
2443     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2444 
2445 
2446 
2447     d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
2448 
2449 
2450 
2451   }
2452 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
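
  // Informative note (not generated text): the enhanced-mode resetDescriptorPool above returns void
  // rather than a Result because the registry lists VK_SUCCESS as the only return code for
  // vkResetDescriptorPool; the overload compiled under VULKAN_HPP_DISABLE_ENHANCED_MODE still exposes
  // the raw Result for callers that want to mirror the C API.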
2453 
2454 
2455   template <typename Dispatch>
2456   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2457   {
2458     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2459     return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
2460   }
2461 
2462 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2463   template <typename DescriptorSetAllocator, typename Dispatch>
2464   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
2465   {
2466     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2467 
2468 
2469     std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
2470     VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
2471     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
2472 
2473     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
2474   }
2475 
2476   template <typename DescriptorSetAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type>
2477   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
2478   {
2479     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2480 
2481 
2482     std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
2483     VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
2484     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
2485 
2486     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
2487   }
2488 
2489 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2490   template <typename Dispatch, typename DescriptorSetAllocator>
2491   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
2492   {
2493     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2494 
2495 
2496     std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
2497     VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
2498     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
2499     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
2500     uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
2501     PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
2502     for ( auto const & descriptorSet : descriptorSets )
2503     {
2504       uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
2505     }
2506     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
2507   }
2508 
2509   template <typename Dispatch, typename DescriptorSetAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
2510   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
2511   {
2512     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2513 
2514 
2515     std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
2516     VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
2517     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
2518     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
2519     uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
2520     PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
2521     for ( auto const & descriptorSet : descriptorSets )
2522     {
2523       uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
2524     }
2525     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
2526   }
2527 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2528 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
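
  // Illustrative sketch, not generated from the registry: allocating descriptor sets with the
  // enhanced-mode overloads above, assuming exceptions enabled and valid placeholder handles
  // `device`, `descriptorPool` and `descriptorSetLayout`.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::DescriptorSetLayout> layouts( 3, descriptorSetLayout );
  //   VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo        allocateInfo( descriptorPool, layouts );
  //   auto descriptorSets       = device.allocateDescriptorSets( allocateInfo );
  //   auto uniqueDescriptorSets = device.allocateDescriptorSetsUnique( allocateInfo );
  //   // each element of uniqueDescriptorSets carries a PoolFree deleter, so it is handed back to
  //   // `descriptorPool` via vkFreeDescriptorSets on destruction; that requires the pool to have been
  //   // created with DescriptorPoolCreateFlagBits::eFreeDescriptorSet.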
2529 
2530 
2531   template <typename Dispatch>
2532   VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2533   {
2534     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2535     return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
2536   }
2537 
2538 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2539   template <typename Dispatch>
2540   VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2541   {
2542     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2543 
2544 
2545 
2546     d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
2547 
2548 
2549 
2550   }
2551 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2552 
2553 
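  // Informative note (not generated text): the parenthesized name in the following definitions,
  // ( Device::free ), is presumably there to keep a platform-provided function-like `free` macro
  // from expanding inside this header; the overloads otherwise mirror freeDescriptorSets.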
2554   template <typename Dispatch>
2555   VULKAN_HPP_INLINE Result ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2556   {
2557     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2558     return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
2559   }
2560 
2561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2562   template <typename Dispatch>
2563   VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2564   {
2565     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2566 
2567 
2568 
2569     d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
2570 
2571 
2572 
2573   }
2574 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2575 
2576 
2577   template <typename Dispatch>
2578   VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2579   {
2580     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2581     d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
2582   }
2583 
2584 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2585   template <typename Dispatch>
2586   VULKAN_HPP_INLINE void Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2587   {
2588     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2589 
2590 
2591 
2592     d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
2593 
2594 
2595 
2596   }
2597 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
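
  // Illustrative sketch, not generated from the registry: the ArrayProxy overload above accepts a
  // container, an initializer list, a single element, or nullptr for an empty range, so a single
  // write with no copies can be issued as below; `write` is a placeholder WriteDescriptorSet filled
  // in elsewhere.
  //
  //   device.updateDescriptorSets( write, nullptr );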
2598 
2599 
2600   template <typename Dispatch>
2601   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2602   {
2603     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2604     return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
2605   }
2606 
2607 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2608   template <typename Dispatch>
2609   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2610   {
2611     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2612 
2613 
2614     VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
2615     VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
2616     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
2617 
2618     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
2619   }
2620 
2621 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2622   template <typename Dispatch>
2623   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2624   {
2625     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2626 
2627 
2628     VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
2629     VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
2630     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
2631 
2632     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2633   }
2634 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2635 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2636 
2637 
2638   template <typename Dispatch>
2639   VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2640   {
2641     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2642     d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2643   }
2644 
2645 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2646   template <typename Dispatch>
2647   VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2648   {
2649     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2650 
2651 
2652 
2653     d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2654 
2655 
2656 
2657   }
2658 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2659 
2660 
2661   template <typename Dispatch>
2662   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2663   {
2664     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2665     d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2666   }
2667 
2668 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2669   template <typename Dispatch>
2670   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2671   {
2672     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2673 
2674 
2675 
2676     d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2677 
2678 
2679 
2680   }
2681 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2682 
2683 
2684   template <typename Dispatch>
2685   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2686   {
2687     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2688     return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
2689   }
2690 
2691 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2692   template <typename Dispatch>
2693   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2694   {
2695     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2696 
2697 
2698     VULKAN_HPP_NAMESPACE::RenderPass renderPass;
2699     VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
2700     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
2701 
2702     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
2703   }
2704 
2705 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2706   template <typename Dispatch>
2707   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2708   {
2709     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2710 
2711 
2712     VULKAN_HPP_NAMESPACE::RenderPass renderPass;
2713     VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
2714     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
2715 
2716     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2717   }
2718 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2719 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2720 
2721 
2722   template <typename Dispatch>
2723   VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2724   {
2725     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2726     d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2727   }
2728 
2729 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2730   template <typename Dispatch>
2731   VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2732   {
2733     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2734 
2735 
2736 
2737     d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2738 
2739 
2740 
2741   }
2742 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2743 
2744 
2745   template <typename Dispatch>
2746   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2747   {
2748     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2749     d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2750   }
2751 
2752 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2753   template <typename Dispatch>
2754   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2755   {
2756     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2757 
2758 
2759 
2760     d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2761 
2762 
2763 
2764   }
2765 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2766 
2767 
2768   template <typename Dispatch>
2769   VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2770   {
2771     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2772     d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
2773   }
2774 
2775 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2776   template <typename Dispatch>
2777   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2778   {
2779     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2780 
2781 
2782     VULKAN_HPP_NAMESPACE::Extent2D granularity;
2783     d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
2784 
2785 
2786     return granularity;
2787   }
2788 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
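
  // Informative note (not generated text): the two overloads above show the pattern used throughout
  // this header; the pointer form mirrors the C entry point vkGetRenderAreaGranularity, while the
  // enhanced form returns the value directly, e.g. (with `renderPass` a valid placeholder handle)
  //
  //   VULKAN_HPP_NAMESPACE::Extent2D granularity = device.getRenderAreaGranularity( renderPass );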
2789 
2790 
2791   template <typename Dispatch>
2792   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2793   {
2794     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2795     return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
2796   }
2797 
2798 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2799   template <typename Dispatch>
2800   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2801   {
2802     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2803 
2804 
2805     VULKAN_HPP_NAMESPACE::CommandPool commandPool;
2806     VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
2807     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
2808 
2809     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
2810   }
2811 
2812 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2813   template <typename Dispatch>
2814   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
2815   {
2816     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2817 
2818 
2819     VULKAN_HPP_NAMESPACE::CommandPool commandPool;
2820     VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
2821     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
2822 
2823     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
2824   }
2825 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2826 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2827 
2828 
2829 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
2830   template <typename Dispatch>
2831   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2832   {
2833     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2834     return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
2835   }
2836 #else
2837   template <typename Dispatch>
2838   VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
2839   {
2840     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2841 
2842 
2843 
2844     VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
2845     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
2846 
2847     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
2848   }
2849 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
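
  // Illustrative sketch, not generated from the registry: the enhanced-mode resetCommandPool above
  // returns ResultValueType<void>::type, so in the default configuration (exceptions enabled) a
  // failure such as an out-of-device-memory error is reported by resultCheck as a thrown exception
  // rather than a return code; `commandPool` is a placeholder handle for the example.
  //
  //   device.resetCommandPool( commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlagBits::eReleaseResources );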
2850 
2851 
2852   template <typename Dispatch>
2853   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2854   {
2855     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2856     return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
2857   }
2858 
2859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2860   template <typename CommandBufferAllocator, typename Dispatch>
2861   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
2862   {
2863     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2864 
2865 
2866     std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
2867     VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
2868     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
2869 
2870     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
2871   }
2872 
2873   template <typename CommandBufferAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type>
2874   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
2875   {
2876     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2877 
2878 
2879     std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
2880     VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
2881     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
2882 
2883     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
2884   }
2885 
2886 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
2887   template <typename Dispatch, typename CommandBufferAllocator>
2888   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
2889   {
2890     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2891 
2892 
2893     std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
2894     VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
2895     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
2896     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
2897     uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
2898     PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
2899     for ( auto const & commandBuffer : commandBuffers )
2900     {
2901       uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
2902     }
2903     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
2904   }
2905 
2906   template <typename Dispatch, typename CommandBufferAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
2907   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
2908   {
2909     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2910 
2911 
2912     std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
2913     VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
2914     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
2915     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
2916     uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
2917     PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
2918     for ( auto const & commandBuffer : commandBuffers )
2919     {
2920       uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
2921     }
2922     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
2923   }
2924 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
2925 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
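  // Editorial note: a minimal usage sketch for the enhanced allocation path above, assuming the
  // default `vk` namespace, exceptions enabled, and that `device` and `commandPool` already exist
  // in the calling code (both names are illustrative):
  //
  //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
  //   auto buffers = device.allocateCommandBuffersUnique( allocInfo );  // std::vector<vk::UniqueCommandBuffer>
  //
  // Each UniqueHandle returns its command buffer to `commandPool` on destruction via the PoolFree
  // deleter constructed above.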
2926 
2927 
2928   template <typename Dispatch>
2929   VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2930   {
2931     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2932     d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
2933   }
2934 
2935 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2936   template <typename Dispatch>
2937   VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2938   {
2939     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2940 
2941 
2942 
2943     d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
2944 
2945 
2946 
2947   }
2948 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2949 
2950 
2951   template <typename Dispatch>
2952   VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2953   {
2954     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2955     d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
2956   }
2957 
2958 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2959   template <typename Dispatch>
2960   VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
2961   {
2962     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2963 
2964 
2965 
2966     d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
2967 
2968 
2969 
2970   }
2971 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2972 
2973 
2974   template <typename Dispatch>
2975   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
2976   {
2977     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2978     return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
2979   }
2980 
2981 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2982   template <typename Dispatch>
2983   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
2984   {
2985     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
2986 
2987 
2988 
2989     VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
2990     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
2991 
2992     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
2993   }
2994 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
2995 
2996 
2997 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
2998   template <typename Dispatch>
2999   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3000   {
3001     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3002     return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
3003   }
3004 #else
3005   template <typename Dispatch>
3006   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
3007   {
3008     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3009 
3010 
3011 
3012     VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
3013     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
3014 
3015     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
3016   }
3017 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
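  // Editorial note: a minimal recording sketch using the enhanced begin()/end() overloads above,
  // assuming exceptions are enabled and `cmd` (illustrative name) is a valid vk::CommandBuffer:
  //
  //   cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
  //   // ... record commands ...
  //   cmd.end();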
3018 
3019 
3020 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
3021   template <typename Dispatch>
3022   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3023   {
3024     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3025     return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
3026   }
3027 #else
3028   template <typename Dispatch>
3029   VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
3030   {
3031     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3032 
3033 
3034 
3035     VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
3036     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
3037 
3038     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
3039   }
3040 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
3041 
3042 
3043   template <typename Dispatch>
3044   VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3045   {
3046     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3047     d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
3048   }
3049 
3050 
3051   template <typename Dispatch>
3052   VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3053   {
3054     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3055     d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
3056   }
3057 
3058 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3059   template <typename Dispatch>
3060   VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3061   {
3062     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3063 
3064 
3065 
3066     d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
3067 
3068 
3069 
3070   }
3071 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3072 
3073 
3074   template <typename Dispatch>
3075   VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3076   {
3077     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3078     d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
3079   }
3080 
3081 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3082   template <typename Dispatch>
3083   VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3084   {
3085     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3086 
3087 
3088 
3089     d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
3090 
3091 
3092 
3093   }
3094 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3095 
3096 
3097   template <typename Dispatch>
3098   VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3099   {
3100     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3101     d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
3102   }
3103 
3104 
3105   template <typename Dispatch>
3106   VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3107   {
3108     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3109     d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
3110   }
3111 
3112 
3113   template <typename Dispatch>
3114   VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3115   {
3116     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3117     d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
3118   }
3119 
3120 
3121   template <typename Dispatch>
3122   VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3123   {
3124     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3125     d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
3126   }
3127 
3128 
3129   template <typename Dispatch>
3130   VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3131   {
3132     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3133     d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
3134   }
3135 
3136 
3137   template <typename Dispatch>
3138   VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3139   {
3140     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3141     d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
3142   }
3143 
3144 
3145   template <typename Dispatch>
3146   VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3147   {
3148     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3149     d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
3150   }
3151 
3152 
3153   template <typename Dispatch>
3154   VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3155   {
3156     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3157     d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
3158   }
3159 
3160 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3161   template <typename Dispatch>
3162   VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3163   {
3164     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3165 
3166 
3167 
3168     d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() );
3169 
3170 
3171 
3172   }
3173 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
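  // Editorial note: usage sketch for the ArrayProxy overload above; `cmd`, `pipelineLayout`, and
  // `descriptorSet` are illustrative names assumed to exist, and no dynamic offsets are passed:
  //
  //   cmd.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, descriptorSet, nullptr );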
3174 
3175 
3176   template <typename Dispatch>
3177   VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3178   {
3179     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3180     d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
3181   }
3182 
3183 
3184   template <typename Dispatch>
3185   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3186   {
3187     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3188     d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
3189   }
3190 
3191 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3192   template <typename Dispatch>
3193   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
3194   {
3195     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3196 #ifdef VULKAN_HPP_NO_EXCEPTIONS
3197     VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
3198 #else
3199     if ( buffers.size() != offsets.size() )
3200     {
3201       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
3202     }
3203 #endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
3204 
3205 
3206     d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
3207 
3208 
3209 
3210   }
3211 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
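  // Editorial note: usage sketch for the ArrayProxy overload above; the buffer and offset ranges
  // must have matching sizes (checked above). `cmd`, `vertexBuffer`, and `instanceBuffer` are
  // illustrative names assumed to exist:
  //
  //   std::array<vk::Buffer, 2>     buffers = { vertexBuffer, instanceBuffer };
  //   std::array<vk::DeviceSize, 2> offsets = { 0, 0 };
  //   cmd.bindVertexBuffers( 0, buffers, offsets );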
3212 
3213 
3214   template <typename Dispatch>
3215   VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3216   {
3217     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3218     d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
3219   }
3220 
3221 
3222   template <typename Dispatch>
3223   VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3224   {
3225     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3226     d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
3227   }
3228 
3229 
3230   template <typename Dispatch>
3231   VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3232   {
3233     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3234     d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
3235   }
3236 
3237 
3238   template <typename Dispatch>
3239   VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3240   {
3241     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3242     d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
3243   }
3244 
3245 
3246   template <typename Dispatch>
3247   VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3248   {
3249     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3250     d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
3251   }
3252 
3253 
3254   template <typename Dispatch>
3255   VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3256   {
3257     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3258     d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
3259   }
3260 
3261 
3262   template <typename Dispatch>
3263   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3264   {
3265     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3266     d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) );
3267   }
3268 
3269 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3270   template <typename Dispatch>
3271   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3272   {
3273     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3274 
3275 
3276 
3277     d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
3278 
3279 
3280 
3281   }
3282 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3283 
3284 
3285   template <typename Dispatch>
3286   VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3287   {
3288     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3289     d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) );
3290   }
3291 
3292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3293   template <typename Dispatch>
3294   VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3295   {
3296     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3297 
3298 
3299 
3300     d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
3301 
3302 
3303 
3304   }
3305 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3306 
3307 
3308   template <typename Dispatch>
3309   VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3310   {
3311     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3312     d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit *>( pRegions ), static_cast<VkFilter>( filter ) );
3313   }
3314 
3315 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3316   template <typename Dispatch>
3317   VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3318   {
3319     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3320 
3321 
3322 
3323     d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) );
3324 
3325 
3326 
3327   }
3328 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3329 
3330 
3331   template <typename Dispatch>
3332   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3333   {
3334     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3335     d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
3336   }
3337 
3338 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3339   template <typename Dispatch>
3340   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3341   {
3342     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3343 
3344 
3345 
3346     d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
3347 
3348 
3349 
3350   }
3351 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3352 
3353 
3354   template <typename Dispatch>
3355   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3356   {
3357     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3358     d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
3359   }
3360 
3361 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3362   template <typename Dispatch>
3363   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3364   {
3365     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3366 
3367 
3368 
3369     d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
3370 
3371 
3372 
3373   }
3374 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3375 
3376 
3377   template <typename Dispatch>
3378   VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3379   {
3380     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3381     d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
3382   }
3383 
3384 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3385   template <typename DataType, typename Dispatch>
3386   VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3387   {
3388     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3389 
3390 
3391 
3392     d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( DataType ), reinterpret_cast<const void *>( data.data() ) );
3393 
3394 
3395 
3396   }
3397 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
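  // Editorial note: the templated overload above derives the byte size from the ArrayProxy. A
  // sketch, assuming `cmd` and `uniformBuffer` (illustrative names) exist; per the Vulkan spec,
  // vkCmdUpdateBuffer is limited to 65536 bytes per call and the size must be a multiple of 4:
  //
  //   std::array<float, 16> matrix = {};  // e.g. a column-major 4x4 matrix
  //   cmd.updateBuffer<float>( uniformBuffer, 0, matrix );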
3398 
3399 
3400   template <typename Dispatch>
3401   VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3402   {
3403     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3404     d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
3405   }
3406 
3407 
3408   template <typename Dispatch>
3409   VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3410   {
3411     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3412     d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
3413   }
3414 
3415 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3416   template <typename Dispatch>
3417   VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3418   {
3419     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3420 
3421 
3422 
3423     d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
3424 
3425 
3426 
3427   }
3428 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3429 
3430 
3431   template <typename Dispatch>
3432   VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3433   {
3434     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3435     d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
3436   }
3437 
3438 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3439   template <typename Dispatch>
3440   VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3441   {
3442     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3443 
3444 
3445 
3446     d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
3447 
3448 
3449 
3450   }
3451 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3452 
3453 
3454   template <typename Dispatch>
3455   VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect * pRects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3456   {
3457     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3458     d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect *>( pRects ) );
3459   }
3460 
3461 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3462   template <typename Dispatch>
3463   VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3464   {
3465     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3466 
3467 
3468 
3469     d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) );
3470 
3471 
3472 
3473   }
3474 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3475 
3476 
3477   template <typename Dispatch>
3478   VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3479   {
3480     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3481     d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve *>( pRegions ) );
3482   }
3483 
3484 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3485   template <typename Dispatch>
3486   VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3487   {
3488     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3489 
3490 
3491 
3492     d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) );
3493 
3494 
3495 
3496   }
3497 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3498 
3499 
3500   template <typename Dispatch>
3501   VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3502   {
3503     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3504     d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
3505   }
3506 
3507 
3508   template <typename Dispatch>
3509   VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3510   {
3511     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3512     d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
3513   }
3514 
3515 
3516   template <typename Dispatch>
3517   VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3518   {
3519     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3520     d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
3521   }
3522 
3523 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3524   template <typename Dispatch>
3525   VULKAN_HPP_INLINE void CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3526   {
3527     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3528 
3529 
3530 
3531     d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
3532 
3533 
3534 
3535   }
3536 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3537 
3538 
3539   template <typename Dispatch>
3540   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3541   {
3542     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3543     d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
3544   }
3545 
3546 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3547   template <typename Dispatch>
3548   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3549   {
3550     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3551 
3552 
3553 
3554     d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
3555 
3556 
3557 
3558   }
3559 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
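  // Editorial note: a typical image-layout transition using the ArrayProxy overload above,
  // assuming `cmd` and `image` (illustrative names) exist; single color mip level and array layer:
  //
  //   vk::ImageMemoryBarrier barrier( {}, vk::AccessFlagBits::eTransferWrite,
  //                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
  //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
  //                                   vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
  //                        {}, nullptr, nullptr, barrier );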
3560 
3561 
3562   template <typename Dispatch>
3563   VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3564   {
3565     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3566     d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
3567   }
3568 
3569 
3570   template <typename Dispatch>
3571   VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3572   {
3573     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3574     d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
3575   }
3576 
3577 
3578   template <typename Dispatch>
3579   VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3580   {
3581     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3582     d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
3583   }
3584 
3585 
3586   template <typename Dispatch>
3587   VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3588   {
3589     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3590     d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
3591   }
3592 
3593 
3594   template <typename Dispatch>
3595   VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3596   {
3597     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3598     d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
3599   }
3600 
3601 
3602   template <typename Dispatch>
3603   VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
3604   {
3605     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3606     d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
3607   }
3608 
3609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3610   template <typename ValuesType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3612   {
3613     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3614 
3615 
3616 
3617     d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( ValuesType ), reinterpret_cast<const void *>( values.data() ) );
3618 
3619 
3620 
3621   }
3622 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
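
  // Usage sketch (illustrative only): the ArrayProxy overload above derives the byte size from the
  // proxy, so a single struct or a std::array can be passed directly. Assumes a valid vk::CommandBuffer
  // `cmd`, a vk::PipelineLayout `layout`, and the default dispatcher argument of the full header.
  //
  //   struct PushData { float scale[2]; float offset[2]; };
  //   PushData data{ { 1.0f, 1.0f }, { 0.0f, 0.0f } };
  //   cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, 0, data );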
3623 
3624 
3625   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3627   {
3628     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3629     d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
3630   }
3631 
3632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3633   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3635   {
3636     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3637 
3638 
3639 
3640     d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
3641 
3642 
3643 
3644   }
3645 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
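
  // Usage sketch (illustrative only): the reference overload above pairs with nextSubpass and
  // endRenderPass defined below. Assumes valid `cmd`, `renderPass`, `framebuffer` and `extent` objects.
  //
  //   vk::ClearValue clearColor( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, extent ), clearColor );
  //   cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
  //   // ... record draw commands ...
  //   cmd.endRenderPass();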
3646 
3647 
3648   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3650   {
3651     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3652     d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
3653   }
3654 
3655 
3656   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3658   {
3659     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3660     d.vkCmdEndRenderPass( m_commandBuffer );
3661   }
3662 
3663 
3664   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3666   {
3667     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3668     d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
3669   }
3670 
3671 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3672   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3674   {
3675     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3676 
3677 
3678 
3679     d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
3680 
3681 
3682 
3683   }
3684 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3685 
3686   //=== VK_VERSION_1_1 ===
3687 
3688 
3689   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
3691   {
3692     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3693     return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
3694   }
3695 
3696 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3697   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
3699   {
3700     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3701 
3702 
3703     uint32_t apiVersion;
3704     VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
3705     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
3706 
3707     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
3708   }
3709 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
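
  // Usage sketch (illustrative only): with exceptions enabled, the enhanced overload above returns the
  // packed version number directly; the VK_API_VERSION_* macros decode it. Assumes the default
  // dispatcher argument of the full header.
  //
  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
  //   uint32_t major      = VK_API_VERSION_MAJOR( apiVersion );
  //   uint32_t minor      = VK_API_VERSION_MINOR( apiVersion );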
3710 
3711 
3712   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3714   {
3715     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3716     return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
3717   }
3718 
3719 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3720   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
3722   {
3723     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3724 
3725 
3726 
3727     VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
3728     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
3729 
3730     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
3731   }
3732 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3733 
3734 
3735   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3737   {
3738     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3739     return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
3740   }
3741 
3742 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3743   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
3745   {
3746     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3747 
3748 
3749 
3750     VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
3751     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
3752 
3753     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
3754   }
3755 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
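
  // Usage sketch (illustrative only): the ArrayProxy overloads of bindBufferMemory2 / bindImageMemory2
  // above submit several bindings in one call; with exceptions enabled a failure throws instead of
  // returning a Result. Assumes a valid vk::Device `device`, buffers `bufferA` / `bufferB`, and a
  // vk::DeviceMemory `memory` with suitable offsets.
  //
  //   std::array<vk::BindBufferMemoryInfo, 2> bindInfos = { vk::BindBufferMemoryInfo( bufferA, memory, 0 ),
  //                                                         vk::BindBufferMemoryInfo( bufferB, memory, 256 ) };
  //   device.bindBufferMemory2( bindInfos );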
3756 
3757 
3758   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3760   {
3761     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3762     d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
3763   }
3764 
3765 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3766   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3768   {
3769     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3770 
3771 
3772     VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
3773     d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
3774 
3775 
3776     return peerMemoryFeatures;
3777   }
3778 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3779 
3780 
3781   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3783   {
3784     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3785     d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
3786   }
3787 
3788 
3789   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3791   {
3792     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3793     d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
3794   }
3795 
3796 
3797   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3799   {
3800     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3801     return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
3802   }
3803 
3804 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3805   template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
3807   {
3808     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3809 
3810 
3811     std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
3812     uint32_t physicalDeviceGroupCount;
3813     VkResult result;
3814     do
3815     {
3816       result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
3817       if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
3818       {
3819         physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
3820         result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
3821       }
3822     } while ( result == VK_INCOMPLETE );
3823     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
3824     VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
3825     if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
3826     {
3827       physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
3828     }
3829     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
3830   }
3831 
3832   template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
3834   {
3835     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3836 
3837 
3838     std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
3839     uint32_t physicalDeviceGroupCount;
3840     VkResult result;
3841     do
3842     {
3843       result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
3844       if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
3845       {
3846         physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
3847         result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
3848       }
3849     } while ( result == VK_INCOMPLETE );
3850     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
3851     VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
3852     if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
3853     {
3854       physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
3855     }
3856     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
3857   }
3858 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
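
  // Usage sketch (illustrative only): the enhanced overloads above wrap the usual count-then-fill
  // enumeration, retrying while the implementation reports VK_INCOMPLETE, so the returned vector is
  // already sized correctly. Assumes a valid vk::Instance `instance`.
  //
  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  //   for ( auto const & group : groups )
  //   {
  //     // group.physicalDeviceCount physical devices can back one logical device
  //   }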
3859 
3860 
3861   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3863   {
3864     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3865     d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
3866   }
3867 
3868 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3869   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3871   {
3872     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3873 
3874 
3875     VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
3876     d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
3877 
3878 
3879     return memoryRequirements;
3880   }
3881 
3882   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3884   {
3885     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3886 
3887 
3888     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
3889     VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
3890     d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
3891 
3892 
3893     return structureChain;
3894   }
3895 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
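
  // Usage sketch (illustrative only): the StructureChain overload above queries extension structures in
  // the same call, e.g. dedicated-allocation requirements. Assumes a valid vk::Device `device` and
  // vk::Image `image`.
  //
  //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
  //     vk::ImageMemoryRequirementsInfo2( image ) );
  //   vk::MemoryRequirements2 const &         requirements = chain.get<vk::MemoryRequirements2>();
  //   vk::MemoryDedicatedRequirements const & dedicated    = chain.get<vk::MemoryDedicatedRequirements>();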
3896 
3897 
3898   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3900   {
3901     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3902     d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
3903   }
3904 
3905 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3906   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3908   {
3909     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3910 
3911 
3912     VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
3913     d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
3914 
3915 
3916     return memoryRequirements;
3917   }
3918 
3919   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3921   {
3922     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3923 
3924 
3925     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
3926     VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
3927     d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
3928 
3929 
3930     return structureChain;
3931   }
3932 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
3933 
3934 
3935   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3937   {
3938     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3939     d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
3940   }
3941 
3942 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3943   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3945   {
3946     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3947 
3948 
3949     VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
3950     d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
3951 
3952 
3953     return features;
3954   }
3955 
3956   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3958   {
3959     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3960 
3961 
3962     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
3963     VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
3964     d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
3965 
3966 
3967     return structureChain;
3968   }
3969 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
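
  // Usage sketch (illustrative only): the StructureChain overload above fills chained feature structures
  // in a single query; vk::PhysicalDeviceVulkan12Features is used here purely as an example of a
  // chainable structure and requires Vulkan 1.2 headers. Assumes a valid vk::PhysicalDevice `physicalDevice`.
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
  //   bool hasTimelineSemaphores = features.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;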
3970 
3971 
3972   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3974   {
3975     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3976     d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
3977   }
3978 
3979 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3980   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3982   {
3983     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3984 
3985 
3986     VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
3987     d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
3988 
3989 
3990     return properties;
3991   }
3992 
3993   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
3995   {
3996     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
3997 
3998 
3999     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
4000     VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
4001     d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
4002 
4003 
4004     return structureChain;
4005   }
4006 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4007 
4008 
4009   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4011   {
4012     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4013     d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
4014   }
4015 
4016 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4017   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4019   {
4020     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4021 
4022 
4023     VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
4024     d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
4025 
4026 
4027     return formatProperties;
4028   }
4029 
4030   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4032   {
4033     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4034 
4035 
4036     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
4037     VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
4038     d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
4039 
4040 
4041     return structureChain;
4042   }
4043 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4044 
4045 
4046   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4048   {
4049     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4050     return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
4051   }
4052 
4053 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4054   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
4056   {
4057     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4058 
4059 
4060     VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
4061     VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
4062     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
4063 
4064     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
4065   }
4066 
4067   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
4069   {
4070     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4071 
4072 
4073     StructureChain<X, Y, Z...> structureChain;
4074     VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
4075     VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
4076     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
4077 
4078     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
4079   }
4080 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
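
  // Usage sketch (illustrative only): with exceptions enabled, the enhanced overload above throws
  // (e.g. vk::FormatNotSupportedError) instead of returning an error Result. Assumes a valid
  // vk::PhysicalDevice `physicalDevice`.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
  //                                            vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 properties = physicalDevice.getImageFormatProperties2( info );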
4081 
4082 
4083   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4085   {
4086     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4087     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
4088   }
4089 
4090 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4091   template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
4093   {
4094     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4095 
4096 
4097     std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
4098     uint32_t queueFamilyPropertyCount;
4099     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
4100     queueFamilyProperties.resize( queueFamilyPropertyCount );
4101     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
4102 
4103     VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
4104     if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
4105     {
4106       queueFamilyProperties.resize( queueFamilyPropertyCount );
4107     }
4108     return queueFamilyProperties;
4109   }
4110 
4111   template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
4113   {
4114     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4115 
4116 
4117     std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
4118     uint32_t queueFamilyPropertyCount;
4119     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
4120     queueFamilyProperties.resize( queueFamilyPropertyCount );
4121     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
4122 
4123     VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
4124     if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
4125     {
4126       queueFamilyProperties.resize( queueFamilyPropertyCount );
4127     }
4128     return queueFamilyProperties;
4129   }
4130 
4131   template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
4133   {
4134     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4135 
4136 
4137     std::vector<StructureChain, StructureChainAllocator> structureChains;
4138     std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
4139     uint32_t queueFamilyPropertyCount;
4140     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
4141     structureChains.resize( queueFamilyPropertyCount );
4142     queueFamilyProperties.resize( queueFamilyPropertyCount );
4143     for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
4144     {
4145       queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
4146     }
4147     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
4148 
4149     VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
4150     if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
4151     {
4152       structureChains.resize( queueFamilyPropertyCount );
4153     }
4154     for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
4155     {
4156       structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
4157     }
4158     return structureChains;
4159   }
4160 
4161   template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
4163   {
4164     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4165 
4166 
4167     std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
4168     std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
4169     uint32_t queueFamilyPropertyCount;
4170     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
4171     structureChains.resize( queueFamilyPropertyCount );
4172     queueFamilyProperties.resize( queueFamilyPropertyCount );
4173     for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
4174     {
4175       queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
4176     }
4177     d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
4178 
4179     VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
4180     if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
4181     {
4182       structureChains.resize( queueFamilyPropertyCount );
4183     }
4184     for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
4185     {
4186       structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
4187     }
4188     return structureChains;
4189   }
4190 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
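
  // Usage sketch (illustrative only): the vector-returning overloads above perform the count-then-fill
  // query internally. Assumes a valid vk::PhysicalDevice `physicalDevice` and the default allocator
  // template argument of the full header.
  //
  //   std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();
  //   for ( size_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       // queue family i supports graphics work
  //     }
  //   }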
4191 
4192 
4193   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4195   {
4196     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4197     d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
4198   }
4199 
4200 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4201   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4203   {
4204     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4205 
4206 
4207     VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
4208     d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
4209 
4210 
4211     return memoryProperties;
4212   }
4213 
4214   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4216   {
4217     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4218 
4219 
4220     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
4221     VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
4222     d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
4223 
4224 
4225     return structureChain;
4226   }
4227 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4228 
4229 
4230   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4232   {
4233     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4234     d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
4235   }
4236 
4237 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4238   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4240   {
4241     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4242 
4243 
4244     VULKAN_HPP_NAMESPACE::Queue queue;
4245     d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
4246 
4247 
4248     return queue;
4249   }
4250 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4251 
4252 
4253   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4255   {
4256     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4257     return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
4258   }
4259 
4260 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4261   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
4263   {
4264     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4265 
4266 
4267     VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
4268     VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
4269     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
4270 
4271     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
4272   }
4273 
4274 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
4275   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
4277   {
4278     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4279 
4280 
4281     VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
4282     VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
4283     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
4284 
4285     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
4286   }
4287 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
4288 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
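
  // Usage sketch (illustrative only): the Unique variant above returns a handle that destroys itself
  // when it goes out of scope. Assumes a valid vk::Device `device`, a filled
  // vk::SamplerYcbcrConversionCreateInfo `createInfo`, and the default allocator / dispatcher
  // arguments of the full header.
  //
  //   vk::UniqueSamplerYcbcrConversion conversion = device.createSamplerYcbcrConversionUnique( createInfo );
  //   // conversion.get() can then be chained into a vk::SamplerYcbcrConversionInfo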
4289 
4290 
4291   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4293   {
4294     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4295     d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4296   }
4297 
4298 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4299   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4301   {
4302     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4303 
4304 
4305 
4306     d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4307 
4308 
4309 
4310   }
4311 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4312 
4313 
4314   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4316   {
4317     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4318     d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4319   }
4320 
4321 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4322   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4324   {
4325     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4326 
4327 
4328 
4329     d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4330 
4331 
4332 
4333   }
4334 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4335 
4336 
4337   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4339   {
4340     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4341     d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
4342   }
4343 
4344 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4345   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4347   {
4348     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4349 
4350 
4351     VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
4352     d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
4353 
4354 
4355     return externalBufferProperties;
4356   }
4357 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4358 
4359 
4360   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4362   {
4363     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4364     d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
4365   }
4366 
4367 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4368   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4370   {
4371     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4372 
4373 
4374     VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
4375     d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
4376 
4377 
4378     return externalFenceProperties;
4379   }
4380 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4381 
4382 
4383   template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4385   {
4386     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4387     d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
4388   }
4389 
4390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4391   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4393   {
4394     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4395 
4396 
4397     VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
4398     d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
4399 
4400 
4401     return externalSemaphoreProperties;
4402   }
4403 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4404 
4405 
4406   template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4408   {
4409     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4410     d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
4411   }
4412 
4413 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4414   template <typename Dispatch>
4415   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4416   {
4417     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4418 
4419 
4420     VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
4421     d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
4422 
4423 
4424     return support;
4425   }
4426 
4427   template <typename X, typename Y, typename... Z, typename Dispatch>
4428   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4429   {
4430     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4431 
4432 
4433     VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
4434     VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
4435     d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
4436 
4437 
4438     return structureChain;
4439   }
4440 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
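
  // Illustrative usage sketch (editorial addition, not part of the generated header): querying whether a
  // descriptor set layout can be created, assuming a valid vk::Device `device`, a populated
  // vk::DescriptorSetLayoutCreateInfo `layoutCreateInfo`, the default `vk` namespace and default dispatcher.
  //
  //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutCreateInfo );
  //   if ( support.supported )
  //   {
  //     // the layout described by layoutCreateInfo can be created on this device
  //   }
  //
  //   // the StructureChain overload returns additional support queries from the same call:
  //   auto chain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport,
  //                                                     vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutCreateInfo );
  //   uint32_t maxVariableCount = chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;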
4441 
4442   //=== VK_VERSION_1_2 ===
4443 
4444 
4445   template <typename Dispatch>
4446   VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4447   {
4448     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4449     d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
4450   }
4451 
4452 
4453   template <typename Dispatch>
4454   VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4455   {
4456     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4457     d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
4458   }
4459 
4460 
4461   template <typename Dispatch>
4462   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4463   {
4464     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4465     return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
4466   }
4467 
4468 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4469   template <typename Dispatch>
4470   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
4471   {
4472     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4473 
4474 
4475     VULKAN_HPP_NAMESPACE::RenderPass renderPass;
4476     VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
4477     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
4478 
4479     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
4480   }
4481 
4482 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
4483   template <typename Dispatch>
4484   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
4485   {
4486     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4487 
4488 
4489     VULKAN_HPP_NAMESPACE::RenderPass renderPass;
4490     VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
4491     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
4492 
4493     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
4494   }
4495 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
4496 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
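
  // Illustrative usage sketch (editorial addition): creating a render pass through the enhanced wrappers,
  // assuming a valid vk::Device `device`, a populated vk::RenderPassCreateInfo2 `renderPassCreateInfo`,
  // exceptions enabled, and the default allocator/dispatcher.
  //
  //   vk::RenderPass renderPass = device.createRenderPass2( renderPassCreateInfo );
  //   // ... use renderPass ...
  //   device.destroyRenderPass( renderPass );
  //
  //   // or, with smart handles, destruction happens automatically when the handle goes out of scope:
  //   vk::UniqueRenderPass uniqueRenderPass = device.createRenderPass2Unique( renderPassCreateInfo );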
4497 
4498 
4499   template <typename Dispatch>
4500   VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4501   {
4502     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4503     d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
4504   }
4505 
4506 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4507   template <typename Dispatch>
4508   VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4509   {
4510     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4511 
4512 
4513 
4514     d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
4515 
4516 
4517 
4518   }
4519 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4520 
4521 
4522   template <typename Dispatch>
4523   VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4524   {
4525     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4526     d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
4527   }
4528 
4529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4530   template <typename Dispatch>
4531   VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4532   {
4533     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4534 
4535 
4536 
4537     d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
4538 
4539 
4540 
4541   }
4542 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4543 
4544 
4545   template <typename Dispatch>
4546   VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4547   {
4548     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4549     d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
4550   }
4551 
4552 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4553   template <typename Dispatch>
4554   VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4555   {
4556     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4557 
4558 
4559 
4560     d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
4561 
4562 
4563 
4564   }
4565 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
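
  // Illustrative usage sketch (editorial addition): recording a render pass instance with the *2 entry
  // points, assuming a vk::CommandBuffer `commandBuffer` in the recording state, a populated
  // vk::RenderPassBeginInfo `renderPassBeginInfo` with at least two subpasses, and the default dispatcher.
  //
  //   vk::SubpassBeginInfo subpassBeginInfo( vk::SubpassContents::eInline );
  //   vk::SubpassEndInfo   subpassEndInfo;
  //   commandBuffer.beginRenderPass2( renderPassBeginInfo, subpassBeginInfo );
  //   // ... draw commands for subpass 0 ...
  //   commandBuffer.nextSubpass2( subpassBeginInfo, subpassEndInfo );
  //   // ... draw commands for subpass 1 ...
  //   commandBuffer.endRenderPass2( subpassEndInfo );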
4566 
4567 
4568   template <typename Dispatch>
4569   VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4570   {
4571     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4572     d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
4573   }
4574 
4575 
4576   template <typename Dispatch>
4577   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4578   {
4579     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4580     return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
4581   }
4582 
4583 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4584   template <typename Dispatch>
4585   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
4586   {
4587     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4588 
4589 
4590     uint64_t value;
4591     VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
4592     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
4593 
4594     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
4595   }
4596 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4597 
4598 
4599   template <typename Dispatch>
4600   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4601   {
4602     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4603     return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
4604   }
4605 
4606 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4607   template <typename Dispatch>
4608   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
4609   {
4610     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4611 
4612 
4613 
4614     VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
4615     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
4616 
4617     return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
4618   }
4619 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4620 
4621 
4622   template <typename Dispatch>
4623   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4624   {
4625     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4626     return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
4627   }
4628 
4629 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4630   template <typename Dispatch>
4631   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
4632   {
4633     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4634 
4635 
4636 
4637     VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
4638     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
4639 
4640     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
4641   }
4642 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
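
  // Illustrative usage sketch (editorial addition): host-side use of a timeline semaphore, assuming a valid
  // vk::Device `device`, a vk::Semaphore `timelineSemaphore` created with vk::SemaphoreType::eTimeline,
  // exceptions enabled, and the default dispatcher.
  //
  //   uint64_t counter = device.getSemaphoreCounterValue( timelineSemaphore );
  //
  //   vk::SemaphoreSignalInfo signalInfo( timelineSemaphore, counter + 1 );
  //   device.signalSemaphore( signalInfo );
  //
  //   vk::Semaphore semaphores[] = { timelineSemaphore };
  //   uint64_t      values[]     = { counter + 1 };
  //   vk::SemaphoreWaitInfo waitInfo( {}, 1, semaphores, values );
  //   vk::Result waitResult = device.waitSemaphores( waitInfo, 1000000000 );  // eSuccess, or eTimeout on expiry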
4643 
4644 
4645   template <typename Dispatch>
4646   VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4647   {
4648     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4649     return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
4650   }
4651 
4652 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4653   template <typename Dispatch>
4654   VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4655   {
4656     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4657 
4658 
4659 
4660     VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
4661 
4662 
4663     return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
4664   }
4665 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
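
  // Illustrative usage sketch (editorial addition): retrieving a buffer device address, assuming a valid
  // vk::Device `device`, a vk::Buffer `buffer` created with vk::BufferUsageFlagBits::eShaderDeviceAddress,
  // and the bufferDeviceAddress feature enabled.
  //
  //   vk::BufferDeviceAddressInfo addressInfo( buffer );
  //   vk::DeviceAddress address = device.getBufferAddress( addressInfo );
  //   // `address` can now be handed to shaders, e.g. through a push constant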
4666 
4667 
4668   template <typename Dispatch>
4669   VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4670   {
4671     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4672     return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
4673   }
4674 
4675 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4676   template <typename Dispatch>
4677   VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4678   {
4679     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4680 
4681 
4682 
4683     uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
4684 
4685 
4686     return result;
4687   }
4688 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4689 
4690 
4691   template <typename Dispatch>
4692   VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4693   {
4694     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4695     return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
4696   }
4697 
4698 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4699   template <typename Dispatch>
4700   VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4701   {
4702     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4703 
4704 
4705 
4706     uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
4707 
4708 
4709     return result;
4710   }
4711 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4712 
4713   //=== VK_VERSION_1_3 ===
4714 
4715 
4716   template <typename Dispatch>
4717   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4718   {
4719     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4720     return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
4721   }
4722 
4723 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4724   template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
4725   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( Dispatch const & d ) const
4726   {
4727     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4728 
4729 
4730     std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
4731     uint32_t toolCount;
4732     VkResult result;
4733     do
4734     {
4735       result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
4736       if ( ( result == VK_SUCCESS ) && toolCount )
4737       {
4738         toolProperties.resize( toolCount );
4739         result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
4740       }
4741     } while ( result == VK_INCOMPLETE );
4742     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
4743     VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
4744     if ( toolCount < toolProperties.size() )
4745     {
4746       toolProperties.resize( toolCount );
4747     }
4748     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
4749   }
4750 
4751   template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
4752   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
4753   {
4754     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4755 
4756 
4757     std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator );
4758     uint32_t toolCount;
4759     VkResult result;
4760     do
4761     {
4762       result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
4763       if ( ( result == VK_SUCCESS ) && toolCount )
4764       {
4765         toolProperties.resize( toolCount );
4766         result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
4767       }
4768     } while ( result == VK_INCOMPLETE );
4769     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
4770     VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
4771     if ( toolCount < toolProperties.size() )
4772     {
4773       toolProperties.resize( toolCount );
4774     }
4775     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
4776   }
4777 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
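
  // Illustrative usage sketch (editorial addition): listing the tools attached to a physical device
  // (validation layers, profilers, ...), assuming a valid vk::PhysicalDevice `physicalDevice`, exceptions
  // enabled, and the default allocator/dispatcher template arguments.
  //
  //   std::vector<vk::PhysicalDeviceToolProperties> tools = physicalDevice.getToolProperties();
  //   for ( vk::PhysicalDeviceToolProperties const & tool : tools )
  //   {
  //     // tool.name, tool.version and tool.purposes describe each attached tool
  //   }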
4778 
4779 
4780   template <typename Dispatch>
4781   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4782   {
4783     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4784     return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
4785   }
4786 
4787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4788   template <typename Dispatch>
4789   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
4790   {
4791     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4792 
4793 
4794     VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
4795     VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
4796     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
4797 
4798     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
4799   }
4800 
4801 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
4802   template <typename Dispatch>
4803   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
4804   {
4805     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4806 
4807 
4808     VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
4809     VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
4810     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
4811 
4812     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
4813   }
4814 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
4815 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4816 
4817 
4818   template <typename Dispatch>
4819   VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4820   {
4821     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4822     d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4823   }
4824 
4825 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4826   template <typename Dispatch>
4827   VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4828   {
4829     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4830 
4831 
4832 
4833     d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4834 
4835 
4836 
4837   }
4838 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4839 
4840 
4841   template <typename Dispatch>
4842   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4843   {
4844     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4845     d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4846   }
4847 
4848 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4849   template <typename Dispatch>
4850   VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4851   {
4852     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4853 
4854 
4855 
4856     d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4857 
4858 
4859 
4860   }
4861 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4862 
4863 
4864 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4865   template <typename Dispatch>
4866   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4867   {
4868     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4869     return static_cast<Result>( d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
4870   }
4871 #else
4872   template <typename Dispatch>
4873   VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const
4874   {
4875     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4876 
4877 
4878 
4879     VkResult result = d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
4880     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
4881 
4882     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
4883   }
4884 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
4885 
4886 
4887   template <typename Dispatch>
4888   VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4889   {
4890     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4891     d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
4892   }
4893 
4894 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4895   template <typename Dispatch>
4896   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4897   {
4898     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4899 
4900 
4901     uint64_t data;
4902     d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
4903 
4904 
4905     return data;
4906   }
4907 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
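
  // Illustrative usage sketch (editorial addition): attaching a 64-bit payload to a Vulkan object through a
  // private data slot, assuming a valid vk::Device `device`, a vk::Image `image`, exceptions enabled, and
  // the default allocator/dispatcher. The object handle parameter is the raw Vulkan handle value.
  //
  //   vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo() );
  //   uint64_t imageHandle = uint64_t( static_cast<VkImage>( image ) );
  //   device.setPrivateData( vk::ObjectType::eImage, imageHandle, slot, 42 );
  //   uint64_t value = device.getPrivateData( vk::ObjectType::eImage, imageHandle, slot );  // 42
  //   device.destroyPrivateDataSlot( slot );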
4908 
4909 
4910   template <typename Dispatch>
4911   VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4912   {
4913     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4914     d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
4915   }
4916 
4917 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4918   template <typename Dispatch>
4919   VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4920   {
4921     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4922 
4923 
4924 
4925     d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
4926 
4927 
4928 
4929   }
4930 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4931 
4932 
4933   template <typename Dispatch>
4934   VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4935   {
4936     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4937     d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
4938   }
4939 
4940 
4941   template <typename Dispatch>
4942   VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4943   {
4944     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4945     d.vkCmdWaitEvents2( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
4946   }
4947 
4948 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4949   template <typename Dispatch>
4950   VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
4951   {
4952     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4953 #ifdef VULKAN_HPP_NO_EXCEPTIONS
4954     VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
4955 #else
4956     if ( events.size() != dependencyInfos.size() )
4957     {
4958       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
4959     }
4960 #endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
4961 
4962 
4963     d.vkCmdWaitEvents2( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
4964 
4965 
4966 
4967   }
4968 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
4969 
4970 
4971   template <typename Dispatch>
4972   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4973   {
4974     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4975     d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
4976   }
4977 
4978 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4979   template <typename Dispatch>
4980   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
4981   {
4982     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4983 
4984 
4985 
4986     d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
4987 
4988 
4989 
4990   }
4991 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
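
  // Illustrative usage sketch (editorial addition): an image layout transition with the synchronization2
  // entry points, assuming a vk::CommandBuffer `commandBuffer` in the recording state and a vk::Image `image`.
  //
  //   vk::ImageMemoryBarrier2 imageBarrier;
  //   imageBarrier.srcStageMask     = vk::PipelineStageFlagBits2::eTopOfPipe;
  //   imageBarrier.dstStageMask     = vk::PipelineStageFlagBits2::eColorAttachmentOutput;
  //   imageBarrier.dstAccessMask    = vk::AccessFlagBits2::eColorAttachmentWrite;
  //   imageBarrier.oldLayout        = vk::ImageLayout::eUndefined;
  //   imageBarrier.newLayout        = vk::ImageLayout::eColorAttachmentOptimal;
  //   imageBarrier.image            = image;
  //   imageBarrier.subresourceRange = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  //
  //   vk::DependencyInfo dependencyInfo;
  //   dependencyInfo.imageMemoryBarrierCount = 1;
  //   dependencyInfo.pImageMemoryBarriers    = &imageBarrier;
  //   commandBuffer.pipelineBarrier2( dependencyInfo );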
4992 
4993 
4994   template <typename Dispatch>
4995   VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
4996   {
4997     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
4998     d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
4999   }
5000 
5001 
5002   template <typename Dispatch>
5003   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5004   {
5005     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5006     return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
5007   }
5008 
5009 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5010   template <typename Dispatch>
5011   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
5012   {
5013     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5014 
5015 
5016 
5017     VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
5018     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
5019 
5020     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
5021   }
5022 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
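
  // Illustrative usage sketch (editorial addition): submitting a recorded command buffer through
  // vkQueueSubmit2, assuming a valid vk::Queue `queue`, a recorded vk::CommandBuffer `commandBuffer`,
  // exceptions enabled, and the default dispatcher; the fence parameter defaults to a null handle.
  //
  //   vk::CommandBufferSubmitInfo commandBufferInfo( commandBuffer );
  //   vk::SubmitInfo2             submitInfo;
  //   submitInfo.commandBufferInfoCount = 1;
  //   submitInfo.pCommandBufferInfos    = &commandBufferInfo;
  //   queue.submit2( submitInfo );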
5023 
5024 
5025   template <typename Dispatch>
5026   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5027   {
5028     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5029     d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
5030   }
5031 
5032 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5033   template <typename Dispatch>
5034   VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5035   {
5036     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5037 
5038 
5039 
5040     d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
5041 
5042 
5043 
5044   }
5045 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
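
  // Illustrative usage sketch (editorial addition): a buffer-to-buffer copy with the *2 copy commands,
  // assuming a vk::CommandBuffer `commandBuffer` in the recording state and vk::Buffer handles `src` and
  // `dst` of at least 256 bytes each.
  //
  //   vk::BufferCopy2     region( 0 /*srcOffset*/, 0 /*dstOffset*/, 256 /*size*/ );
  //   vk::CopyBufferInfo2 copyInfo( src, dst, 1, &region );
  //   commandBuffer.copyBuffer2( copyInfo );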
5046 
5047 
5048   template <typename Dispatch>
5049   VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5050   {
5051     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5052     d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
5053   }
5054 
5055 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5056   template <typename Dispatch>
5057   VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5058   {
5059     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5060 
5061 
5062 
5063     d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
5064 
5065 
5066 
5067   }
5068 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5069 
5070 
5071   template <typename Dispatch>
5072   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5073   {
5074     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5075     d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
5076   }
5077 
5078 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5079   template <typename Dispatch>
5080   VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5081   {
5082     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5083 
5084 
5085 
5086     d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
5087 
5088 
5089 
5090   }
5091 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5092 
5093 
5094   template <typename Dispatch>
5095   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5096   {
5097     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5098     d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
5099   }
5100 
5101 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5102   template <typename Dispatch>
5103   VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5104   {
5105     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5106 
5107 
5108 
5109     d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
5110 
5111 
5112 
5113   }
5114 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5115 
5116 
5117   template <typename Dispatch>
5118   VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5119   {
5120     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5121     d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
5122   }
5123 
5124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5125   template <typename Dispatch>
5126   VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5127   {
5128     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5129 
5130 
5131 
5132     d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
5133 
5134 
5135 
5136   }
5137 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5138 
5139 
5140   template <typename Dispatch>
5141   VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5142   {
5143     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5144     d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
5145   }
5146 
5147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5148   template <typename Dispatch>
5149   VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5150   {
5151     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5152 
5153 
5154 
5155     d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
5156 
5157 
5158 
5159   }
5160 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5161 
5162 
5163   template <typename Dispatch>
5164   VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5165   {
5166     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5167     d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
5168   }
5169 
5170 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5171   template <typename Dispatch>
5172   VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5173   {
5174     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5175 
5176 
5177 
5178     d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
5179 
5180 
5181 
5182   }
5183 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5184 
5185 
5186   template <typename Dispatch>
5187   VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5188   {
5189     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5190     d.vkCmdEndRendering( m_commandBuffer );
5191   }
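
  // Illustrative usage sketch (editorial addition): recording with dynamic rendering (no render pass
  // object), assuming a vk::CommandBuffer `commandBuffer` in the recording state and a vk::ImageView
  // `colorView` whose image has been transitioned to vk::ImageLayout::eColorAttachmentOptimal.
  //
  //   vk::RenderingAttachmentInfo colorAttachment;
  //   colorAttachment.imageView   = colorView;
  //   colorAttachment.imageLayout = vk::ImageLayout::eColorAttachmentOptimal;
  //   colorAttachment.loadOp      = vk::AttachmentLoadOp::eClear;
  //   colorAttachment.storeOp     = vk::AttachmentStoreOp::eStore;
  //
  //   vk::RenderingInfo renderingInfo;
  //   renderingInfo.renderArea           = vk::Rect2D( { 0, 0 }, { 800, 600 } );
  //   renderingInfo.layerCount           = 1;
  //   renderingInfo.colorAttachmentCount = 1;
  //   renderingInfo.pColorAttachments    = &colorAttachment;
  //
  //   commandBuffer.beginRendering( renderingInfo );
  //   // ... draw commands ...
  //   commandBuffer.endRendering();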
5192 
5193 
5194   template <typename Dispatch>
5195   VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5196   {
5197     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5198     d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
5199   }
5200 
5201 
5202   template <typename Dispatch>
5203   VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5204   {
5205     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5206     d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
5207   }
5208 
5209 
5210   template <typename Dispatch>
5211   VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5212   {
5213     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5214     d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
5215   }
5216 
5217 
5218   template <typename Dispatch>
5219   VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5220   {
5221     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5222     d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
5223   }
5224 
5225 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5226   template <typename Dispatch>
5227   VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5228   {
5229     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5230 
5231 
5232 
5233     d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
5234 
5235 
5236 
5237   }
5238 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5239 
5240 
5241   template <typename Dispatch>
5242   VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5243   {
5244     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5245     d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
5246   }
5247 
5248 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5249   template <typename Dispatch>
5250   VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
5251   {
5252     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5253 
5254 
5255 
5256     d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
5257 
5258 
5259 
5260   }
5261 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
5262 
5263 
5264   template <typename Dispatch>
5265   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
5266   {
5267     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5268     d.vkCmdBindVertexBuffers2( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) );
5269   }
5270 
5271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5272   template <typename Dispatch>
bindVertexBuffers2(uint32_t firstBinding,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,Dispatch const & d) const5273   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
5274   {
5275     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
5276 #ifdef VULKAN_HPP_NO_EXCEPTIONS
5277     VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
5278     VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
5279     VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
5280 #else
5281     if ( buffers.size() != offsets.size() )
5282   {
5283     throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
5284   }
5285     if ( !sizes.empty() && buffers.size() != sizes.size() )
5286   {
5287     throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
5288   }
5289     if ( !strides.empty() && buffers.size() != strides.size() )
5290   {
5291     throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
5292   }
5293 #endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
5294 
5295 
5296     d.vkCmdBindVertexBuffers2( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
5297 
5298 
5299 
5300   }
5301 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
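
  // Note (added, non-generated): a minimal usage sketch for the ArrayProxy overload of bindVertexBuffers2
  // above. The handles `cmd` and `vertexBuffer` and the `Vertex` struct are hypothetical; with exceptions
  // enabled, mismatched array sizes throw a LogicError as implemented above (sizes and strides may each be
  // empty or match buffers.size()).
  //
  //   vk::CommandBuffer cmd          = ...;
  //   vk::Buffer        vertexBuffer = ...;
  //   cmd.bindVertexBuffers2( 0,                      // firstBinding
  //                           { vertexBuffer },       // buffers
  //                           { 0 },                  // offsets
  //                           {},                     // sizes (empty: use whole buffers)
  //                           { sizeof( Vertex ) } ); // strides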

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilOp( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
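
  // Note (added, non-generated): a minimal usage sketch for the StructureChain overloads above, assuming
  // exceptions and enhanced mode are enabled. `device` and `imageCreateInfo` are hypothetical, and chaining
  // MemoryDedicatedRequirements is only one example of a structure that can be queried alongside
  // MemoryRequirements2.
  //
  //   vk::DeviceImageMemoryRequirements info{ &imageCreateInfo };
  //   auto   chain     = device.getImageMemoryRequirements<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );
  //   auto & memReqs   = chain.get<vk::MemoryRequirements2>().memoryRequirements;
  //   auto & dedicated = chain.get<vk::MemoryDedicatedRequirements>();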

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }

  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );

    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VKSC_VERSION_1_0 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getCommandPoolMemoryConsumption( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer, VULKAN_HPP_NAMESPACE::CommandPoolMemoryConsumption * pConsumption, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetCommandPoolMemoryConsumption( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandBuffer>( commandBuffer ), reinterpret_cast<VkCommandPoolMemoryConsumption *>( pConsumption ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::CommandPoolMemoryConsumption Device::getCommandPoolMemoryConsumption( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::CommandPoolMemoryConsumption consumption;
    d.vkGetCommandPoolMemoryConsumption( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandBuffer>( commandBuffer ), reinterpret_cast<VkCommandPoolMemoryConsumption *>( &consumption ) );

    return consumption;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
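
  // Note (added, non-generated): a minimal usage sketch for the enhanced overload above. The handles
  // `device`, `commandPool` and `commandBuffer` are hypothetical, and the member names referenced in the
  // trailing comment are assumptions based on the Vulkan SC VkCommandPoolMemoryConsumption structure.
  //
  //   vk::CommandPoolMemoryConsumption consumption = device.getCommandPoolMemoryConsumption( commandPool, commandBuffer );
  //   // inspect consumption.commandPoolAllocated / consumption.commandPoolReservedSize / consumption.commandBufferAllocated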

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultData( VULKAN_HPP_NAMESPACE::FaultQueryBehavior faultQueryBehavior, VULKAN_HPP_NAMESPACE::Bool32 * pUnrecordedFaults, uint32_t * pFaultCount, VULKAN_HPP_NAMESPACE::FaultData * pFaults, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFaultData( m_device, static_cast<VkFaultQueryBehavior>( faultQueryBehavior ), reinterpret_cast<VkBool32 *>( pUnrecordedFaults ), pFaultCount, reinterpret_cast<VkFaultData *>( pFaults ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename FaultDataAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::Bool32, std::vector<VULKAN_HPP_NAMESPACE::FaultData, FaultDataAllocator>>>::type Device::getFaultData( VULKAN_HPP_NAMESPACE::FaultQueryBehavior faultQueryBehavior, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<VULKAN_HPP_NAMESPACE::Bool32, std::vector<VULKAN_HPP_NAMESPACE::FaultData, FaultDataAllocator>> data_;
    VULKAN_HPP_NAMESPACE::Bool32 & unrecordedFaults = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::FaultData, FaultDataAllocator> & faults = data_.second;
    uint32_t faultCount;
    VkResult result;
    do
    {
      result = d.vkGetFaultData( m_device, static_cast<VkFaultQueryBehavior>( faultQueryBehavior ), reinterpret_cast<VkBool32 *>( &unrecordedFaults ), &faultCount, nullptr );
      if ( ( result == VK_SUCCESS ) && faultCount )
      {
        faults.resize( faultCount );
        result = d.vkGetFaultData( m_device, static_cast<VkFaultQueryBehavior>( faultQueryBehavior ), reinterpret_cast<VkBool32 *>( &unrecordedFaults ), &faultCount, reinterpret_cast<VkFaultData *>( faults.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultData" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }

  template <typename FaultDataAllocator, typename Dispatch, typename B2, typename std::enable_if<std::is_same<typename B2::value_type, FaultData>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::Bool32, std::vector<VULKAN_HPP_NAMESPACE::FaultData, FaultDataAllocator>>>::type Device::getFaultData( VULKAN_HPP_NAMESPACE::FaultQueryBehavior faultQueryBehavior, FaultDataAllocator & faultDataAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<VULKAN_HPP_NAMESPACE::Bool32, std::vector<VULKAN_HPP_NAMESPACE::FaultData, FaultDataAllocator>> data_( std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( faultDataAllocator ) );
    VULKAN_HPP_NAMESPACE::Bool32 & unrecordedFaults = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::FaultData, FaultDataAllocator> & faults = data_.second;
    uint32_t faultCount;
    VkResult result;
    do
    {
      result = d.vkGetFaultData( m_device, static_cast<VkFaultQueryBehavior>( faultQueryBehavior ), reinterpret_cast<VkBool32 *>( &unrecordedFaults ), &faultCount, nullptr );
      if ( ( result == VK_SUCCESS ) && faultCount )
      {
        faults.resize( faultCount );
        result = d.vkGetFaultData( m_device, static_cast<VkFaultQueryBehavior>( faultQueryBehavior ), reinterpret_cast<VkBool32 *>( &unrecordedFaults ), &faultCount, reinterpret_cast<VkFaultData *>( faults.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultData" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
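
  // Note (added, non-generated): a minimal usage sketch for the enhanced getFaultData overload above,
  // assuming exceptions are enabled so ResultValueType unwraps to the pair directly. `device` is
  // hypothetical, and eGetAndClearAllFaults is just one FaultQueryBehavior value.
  //
  //   auto [ unrecordedFaults, faults ] = device.getFaultData( vk::FaultQueryBehavior::eGetAndClearAllFaults );
  //   for ( auto const & fault : faults )
  //   {
  //     // inspect fault.faultLevel and fault.faultType
  //   }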

  //=== VK_KHR_surface ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32 * pSupported, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Bool32 supported;
    VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }

  template <typename SurfaceFormatKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t presentModeCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }

  template <typename PresentModeKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t presentModeCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
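
  // Note (added, non-generated): a minimal sketch of the usual VK_KHR_surface query sequence with the
  // enhanced overloads above, assuming exceptions are enabled. `physicalDevice`, `surface` and
  // `queueFamilyIndex` are hypothetical.
  //
  //   if ( physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) )
  //   {
  //     vk::SurfaceCapabilitiesKHR        caps    = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //     std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  //     std::vector<vk::PresentModeKHR>   modes   = physicalDevice.getSurfacePresentModesKHR( surface );
  //     // pick a format and present mode, clamp the extent to caps, then create the swapchain
  //   }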

  //=== VK_KHR_swapchain ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result = d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result = d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ImageAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
    uint32_t swapchainImageCount;
    VkResult result;
    do
    {
      result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
    if ( swapchainImageCount < swapchainImages.size() )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
  }

  template <typename ImageAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
    uint32_t swapchainImageCount;
    VkResult result;
    do
    {
      result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
    if ( swapchainImageCount < swapchainImages.size() )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t * pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    uint32_t imageIndex;
    VkResult result = d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );

    return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
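
  // Note (added, non-generated): a minimal sketch of one acquire/present iteration using the enhanced
  // overloads above, assuming exceptions are enabled. `device`, `queue`, `swapchain`, the semaphores and
  // the command-buffer submission in between are hypothetical and omitted.
  //
  //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, {} );
  //   uint32_t imageIndex = acquired.value;   // acquired.result may also be eSuboptimalKHR
  //   // ... record and submit work that signals renderFinishedSemaphore ...
  //   vk::PresentInfoKHR presentInfo( renderFinishedSemaphore, swapchain, imageIndex );
  //   vk::Result presentResult = queue.presentKHR( presentInfo );   // eSuccess or eSuboptimalKHR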

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
    VkResult result = d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6001 
6002 
6003   template <typename Dispatch>
getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,uint32_t * pRectCount,VULKAN_HPP_NAMESPACE::Rect2D * pRects,Dispatch const & d) const6004   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pRectCount, VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
6005   {
6006     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6007     return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
6008   }
6009 
6010 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6011   template <typename Rect2DAllocator, typename Dispatch>
getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface,Dispatch const & d) const6012   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
6013   {
6014     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6015 
6016 
6017     std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
6018     uint32_t rectCount;
6019     VkResult result;
6020     do
6021     {
6022       result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
6023       if ( ( result == VK_SUCCESS ) && rectCount )
6024       {
6025         rects.resize( rectCount );
6026         result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
6027       }
6028     } while ( result == VK_INCOMPLETE );
6029     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
6030     VULKAN_HPP_ASSERT( rectCount <= rects.size() );
6031     if ( rectCount < rects.size() )
6032     {
6033       rects.resize( rectCount );
6034     }
6035     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
6036   }
6037 
6038   template <typename Rect2DAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type>
6039   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
6040   {
6041     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6042 
6043 
6044     std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
6045     uint32_t rectCount;
6046     VkResult result;
6047     do
6048     {
6049       result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
6050       if ( ( result == VK_SUCCESS ) && rectCount )
6051       {
6052         rects.resize( rectCount );
6053         result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
6054       }
6055     } while ( result == VK_INCOMPLETE );
6056     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
6057     VULKAN_HPP_ASSERT( rectCount <= rects.size() );
6058     if ( rectCount < rects.size() )
6059     {
6060       rects.resize( rectCount );
6061     }
6062     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
6063   }
6064 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6065 
6066 
6067   template <typename Dispatch>
6068   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6069   {
6070     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6071     return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
6072   }
6073 
6074 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6075   template <typename Dispatch>
6076   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
6077   {
6078     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6079 
6080 
6081     uint32_t imageIndex;
6082     VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
6083     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
6084 
6085     return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
6086   }
6087 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
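  // A minimal usage sketch for the enhanced acquireNextImage2KHR overload above. It assumes an existing
  // vk::Device `device`, a vk::SwapchainKHR `swapchain`, a vk::Semaphore `imageAvailable` and a `deviceMask`;
  // the names are illustrative only. eTimeout, eNotReady and eSuboptimalKHR are accepted as success codes by
  // this overload, so the returned ResultValue has to be inspected rather than relying on an exception:
  //
  //   VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX, imageAvailable, {}, deviceMask );
  //   VULKAN_HPP_NAMESPACE::ResultValue<uint32_t> acquired = device.acquireNextImage2KHR( acquireInfo );
  //   if ( ( acquired.result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( acquired.result == VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) )
  //   {
  //     uint32_t imageIndex = acquired.value;   // index of the acquired swapchain image
  //   }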
6088 
6089   //=== VK_KHR_display ===
6090 
6091 
6092   template <typename Dispatch>
6093   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6094   {
6095     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6096     return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
6097   }
6098 
6099 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6100   template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
6101   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
6102   {
6103     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6104 
6105 
6106     std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
6107     uint32_t propertyCount;
6108     VkResult result;
6109     do
6110     {
6111       result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
6112       if ( ( result == VK_SUCCESS ) && propertyCount )
6113       {
6114         properties.resize( propertyCount );
6115         result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
6116       }
6117     } while ( result == VK_INCOMPLETE );
6118     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
6119     VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
6120     if ( propertyCount < properties.size() )
6121     {
6122       properties.resize( propertyCount );
6123     }
6124     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
6125   }
6126 
6127   template <typename DisplayPropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type>
6128   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
6129   {
6130     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6131 
6132 
6133     std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
6134     uint32_t propertyCount;
6135     VkResult result;
6136     do
6137     {
6138       result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
6139       if ( ( result == VK_SUCCESS ) && propertyCount )
6140       {
6141         properties.resize( propertyCount );
6142         result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
6143       }
6144     } while ( result == VK_INCOMPLETE );
6145     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
6146     VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
6147     if ( propertyCount < properties.size() )
6148     {
6149       properties.resize( propertyCount );
6150     }
6151     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
6152   }
6153 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6154 
6155 
6156   template <typename Dispatch>
6157   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6158   {
6159     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6160     return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
6161   }
6162 
6163 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6164   template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
6165   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
6166   {
6167     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6168 
6169 
6170     std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
6171     uint32_t propertyCount;
6172     VkResult result;
6173     do
6174     {
6175       result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
6176       if ( ( result == VK_SUCCESS ) && propertyCount )
6177       {
6178         properties.resize( propertyCount );
6179         result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
6180       }
6181     } while ( result == VK_INCOMPLETE );
6182     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
6183     VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
6184     if ( propertyCount < properties.size() )
6185     {
6186       properties.resize( propertyCount );
6187     }
6188     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
6189   }
6190 
6191   template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
6192   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
6193   {
6194     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6195 
6196 
6197     std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
6198     uint32_t propertyCount;
6199     VkResult result;
6200     do
6201     {
6202       result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
6203       if ( ( result == VK_SUCCESS ) && propertyCount )
6204       {
6205         properties.resize( propertyCount );
6206         result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
6207       }
6208     } while ( result == VK_INCOMPLETE );
6209     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
6210     VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
6211     if ( propertyCount < properties.size() )
6212     {
6213       properties.resize( propertyCount );
6214     }
6215     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
6216   }
6217 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6218 
6219 
6220   template <typename Dispatch>
6221   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t * pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6222   {
6223     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6224     return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
6225   }
6226 
6227 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6228   template <typename DisplayKHRAllocator, typename Dispatch>
6229   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
6230   {
6231     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6232 
6233 
6234     std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
6235     uint32_t displayCount;
6236     VkResult result;
6237     do
6238     {
6239       result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
6240       if ( ( result == VK_SUCCESS ) && displayCount )
6241       {
6242         displays.resize( displayCount );
6243         result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
6244       }
6245     } while ( result == VK_INCOMPLETE );
6246     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
6247     VULKAN_HPP_ASSERT( displayCount <= displays.size() );
6248     if ( displayCount < displays.size() )
6249     {
6250       displays.resize( displayCount );
6251     }
6252     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
6253   }
6254 
6255   template <typename DisplayKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type>
6256   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
6257   {
6258     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6259 
6260 
6261     std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
6262     uint32_t displayCount;
6263     VkResult result;
6264     do
6265     {
6266       result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
6267       if ( ( result == VK_SUCCESS ) && displayCount )
6268       {
6269         displays.resize( displayCount );
6270         result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
6271       }
6272     } while ( result == VK_INCOMPLETE );
6273     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
6274     VULKAN_HPP_ASSERT( displayCount <= displays.size() );
6275     if ( displayCount < displays.size() )
6276     {
6277       displays.resize( displayCount );
6278     }
6279     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
6280   }
6281 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6282 
6283 
6284   template <typename Dispatch>
6285   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6286   {
6287     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6288     return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
6289   }
6290 
6291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6292   template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
6293   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
6294   {
6295     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6296 
6297 
6298     std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
6299     uint32_t propertyCount;
6300     VkResult result;
6301     do
6302     {
6303       result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
6304       if ( ( result == VK_SUCCESS ) && propertyCount )
6305       {
6306         properties.resize( propertyCount );
6307         result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
6308       }
6309     } while ( result == VK_INCOMPLETE );
6310     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
6311     VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
6312     if ( propertyCount < properties.size() )
6313     {
6314       properties.resize( propertyCount );
6315     }
6316     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
6317   }
6318 
6319   template <typename DisplayModePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type>
6320   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d ) const
6321   {
6322     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6323 
6324 
6325     std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
6326     uint32_t propertyCount;
6327     VkResult result;
6328     do
6329     {
6330       result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
6331       if ( ( result == VK_SUCCESS ) && propertyCount )
6332       {
6333         properties.resize( propertyCount );
6334         result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
6335       }
6336     } while ( result == VK_INCOMPLETE );
6337     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
6338     VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
6339     if ( propertyCount < properties.size() )
6340     {
6341       properties.resize( propertyCount );
6342     }
6343     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
6344   }
6345 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6346 
6347 
6348   template <typename Dispatch>
6349   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6350   {
6351     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6352     return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
6353   }
6354 
6355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6356   template <typename Dispatch>
6357   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6358   {
6359     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6360 
6361 
6362     VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
6363     VkResult result = d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
6364     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
6365 
6366     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode );
6367   }
6368 
6369 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
6370   template <typename Dispatch>
6371   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6372   {
6373     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6374 
6375 
6376     VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
6377     VkResult result = d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
6378     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
6379 
6380     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
6381   }
6382 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
6383 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6384 
6385 
6386   template <typename Dispatch>
6387   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6388   {
6389     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6390     return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
6391   }
6392 
6393 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6394   template <typename Dispatch>
6395   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
6396   {
6397     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6398 
6399 
6400     VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
6401     VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
6402     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
6403 
6404     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
6405   }
6406 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6407 
6408 
6409   template <typename Dispatch>
6410   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6411   {
6412     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6413     return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
6414   }
6415 
6416 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6417   template <typename Dispatch>
6418   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6419   {
6420     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6421 
6422 
6423     VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
6424     VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
6425     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
6426 
6427     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
6428   }
6429 
6430 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
6431   template <typename Dispatch>
6432   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6433   {
6434     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6435 
6436 
6437     VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
6438     VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
6439     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
6440 
6441     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
6442   }
6443 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
6444 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
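  // A compact sketch of the typical VK_KHR_display path through the enhanced overloads defined above, assuming
  // exceptions are enabled so each call returns its value directly. `physicalDevice`, `instance` and `planeIndex`
  // are assumed to exist, the first display/mode is picked for brevity, and the DisplaySurfaceCreateInfoKHR is
  // left mostly defaulted (a real application would also fill the image extent and alpha mode):
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>     displayProps = physicalDevice.getDisplayPropertiesKHR();
  //   std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> modeProps    = physicalDevice.getDisplayModePropertiesKHR( displayProps[0].display );
  //   VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR           planeCaps    = physicalDevice.getDisplayPlaneCapabilitiesKHR( modeProps[0].displayMode, planeIndex );
  //   VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR           createInfo( {}, modeProps[0].displayMode, planeIndex );
  //   VULKAN_HPP_NAMESPACE::SurfaceKHR                            surface      = instance.createDisplayPlaneSurfaceKHR( createInfo );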
6445 
6446   //=== VK_KHR_display_swapchain ===
6447 
6448 
6449   template <typename Dispatch>
6450   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6451   {
6452     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6453     return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
6454   }
6455 
6456 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6457   template <typename SwapchainKHRAllocator, typename Dispatch>
6458   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6459   {
6460     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6461 
6462 
6463     std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
6464     VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
6465     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
6466 
6467     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
6468   }
6469 
6470   template <typename SwapchainKHRAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type>
6471   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
6472   {
6473     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6474 
6475 
6476     std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
6477     VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
6478     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
6479 
6480     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
6481   }
6482 
6483   template <typename Dispatch>
6484   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6485   {
6486     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6487 
6488 
6489     VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
6490     VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
6491     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
6492 
6493     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
6494   }
6495 
6496 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
6497   template <typename Dispatch, typename SwapchainKHRAllocator>
6498   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6499   {
6500     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6501 
6502 
6503     std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
6504     VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
6505     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
6506     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
6507     uniqueSwapchains.reserve( createInfos.size() );
6508     ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
6509     for ( auto const & swapchain : swapchains )
6510     {
6511       uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
6512     }
6513     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
6514   }
6515 
6516   template <typename Dispatch, typename SwapchainKHRAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
6517   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
6518   {
6519     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6520 
6521 
6522     std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
6523     VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
6524     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
6525     std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
6526     uniqueSwapchains.reserve( createInfos.size() );
6527     ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
6528     for ( auto const & swapchain : swapchains )
6529     {
6530       uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
6531     }
6532     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
6533   }
6534 
6535   template <typename Dispatch>
6536   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6537   {
6538     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6539 
6540 
6541     VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
6542     VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
6543     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
6544 
6545     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
6546   }
6547 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
6548 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
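  // A minimal sketch for the enhanced createSharedSwapchainsKHR overloads above, assuming exceptions are
  // enabled and that `device` plus fully populated SwapchainCreateInfoKHR structures already exist:
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos = { createInfo0, createInfo1 };
  //   std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains = device.createSharedSwapchainsKHR( createInfos );
  //
  //   // or, for a single swapchain that shares presentable images with others:
  //   VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = device.createSharedSwapchainKHR( createInfo0 );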
6549 
6550   //=== VK_KHR_external_memory_fd ===
6551 
6552 
6553   template <typename Dispatch>
6554   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6555   {
6556     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6557     return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
6558   }
6559 
6560 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6561   template <typename Dispatch>
6562   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
6563   {
6564     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6565 
6566 
6567     int fd;
6568     VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
6569     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
6570 
6571     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
6572   }
6573 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6574 
6575 
6576   template <typename Dispatch>
6577   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6578   {
6579     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6580     return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
6581   }
6582 
6583 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6584   template <typename Dispatch>
6585   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
6586   {
6587     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6588 
6589 
6590     VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
6591     VkResult result = d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
6592     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
6593 
6594     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
6595   }
6596 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
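  // A minimal sketch for the fd-based external memory helpers above, assuming exceptions are enabled, that
  // `device` exists, that `exportableMemory` was allocated with an export-allocation info for opaque fds, and
  // that `importedFd` is a dma-buf file descriptor received from elsewhere; the names are illustrative:
  //
  //   VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR getFdInfo( exportableMemory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getMemoryFdKHR( getFdInfo );   // ownership of the fd passes to the caller
  //
  //   VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR fdProps =
  //     device.getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, importedFd );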
6597 
6598   //=== VK_KHR_external_semaphore_fd ===
6599 
6600 
6601   template <typename Dispatch>
6602   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6603   {
6604     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6605     return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
6606   }
6607 
6608 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6609   template <typename Dispatch>
6610   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
6611   {
6612     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6613 
6614 
6615 
6616     VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
6617     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
6618 
6619     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
6620   }
6621 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6622 
6623 
6624   template <typename Dispatch>
6625   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6626   {
6627     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6628     return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
6629   }
6630 
6631 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6632   template <typename Dispatch>
6633   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
6634   {
6635     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6636 
6637 
6638     int fd;
6639     VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
6640     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
6641 
6642     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
6643   }
6644 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
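  // A minimal sketch for the semaphore fd helpers above, again assuming exceptions are enabled and that
  // `device`, `exportSemaphore` and `importSemaphore` already exist; the names are illustrative:
  //
  //   VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR getFdInfo( exportSemaphore, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getSemaphoreFdKHR( getFdInfo );
  //
  //   VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR importInfo( importSemaphore, {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, fd );
  //   device.importSemaphoreFdKHR( importInfo );   // on success the implementation takes ownership of the fd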
6645 
6646   //=== VK_EXT_direct_mode_display ===
6647 
6648 
6649 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
6650   template <typename Dispatch>
6651   VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6652   {
6653     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6654     return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
6655   }
6656 #else
6657   template <typename Dispatch>
6658   VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6659   {
6660     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6661 
6662 
6663 
6664     d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
6665 
6666 
6667 
6668   }
6669 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
6670 
6671   //=== VK_EXT_display_surface_counter ===
6672 
6673 
6674   template <typename Dispatch>
6675   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6676   {
6677     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6678     return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
6679   }
6680 
6681 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6682   template <typename Dispatch>
6683   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
6684   {
6685     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6686 
6687 
6688     VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
6689     VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
6690     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
6691 
6692     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
6693   }
6694 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6695 
6696   //=== VK_EXT_display_control ===
6697 
6698 
6699   template <typename Dispatch>
6700   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6701   {
6702     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6703     return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
6704   }
6705 
6706 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6707   template <typename Dispatch>
6708   VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
6709   {
6710     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6711 
6712 
6713 
6714     VkResult result = d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
6715     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
6716 
6717     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
6718   }
6719 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6720 
6721 
6722   template <typename Dispatch>
6723   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
6724   {
6725     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6726     return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
6727   }
6728 
6729 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6730   template <typename Dispatch>
6731   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6732   {
6733     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6734 
6735 
6736     VULKAN_HPP_NAMESPACE::Fence fence;
6737     VkResult result = d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
6738     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
6739 
6740     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
6741   }
6742 
6743 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
6744   template <typename Dispatch>
registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6745   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6746   {
6747     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6748 
6749 
6750     VULKAN_HPP_NAMESPACE::Fence fence;
6751     VkResult result = d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
6752     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
6753 
6754     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
6755   }
6756 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
6757 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6758 
6759 
6760   template <typename Dispatch>
registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,VULKAN_HPP_NAMESPACE::Fence * pFence,Dispatch const & d) const6761   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
6762   {
6763     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6764     return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
6765   }
6766 
6767 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6768   template <typename Dispatch>
registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6769   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6770   {
6771     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6772 
6773 
6774     VULKAN_HPP_NAMESPACE::Fence fence;
6775     VkResult result = d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
6776     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
6777 
6778     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
6779   }
6780 
6781 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
6782   template <typename Dispatch>
registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display,const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,Dispatch const & d) const6783   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
6784   {
6785     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6786 
6787 
6788     VULKAN_HPP_NAMESPACE::Fence fence;
6789     VkResult result = d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
6790     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
6791 
6792     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
6793   }
6794 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
6795 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6796 
6797 
6798   template <typename Dispatch>
getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,uint64_t * pCounterValue,Dispatch const & d) const6799   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
6800   {
6801     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6802     return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
6803   }
6804 
6805 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6806   template <typename Dispatch>
getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,Dispatch const & d) const6807   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
6808   {
6809     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
6810 
6811 
6812     uint64_t counterValue;
6813     VkResult result = d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
6814     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
6815 
6816     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
6817   }
6818 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
6819 
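  // Usage sketch (editorial note, not emitted by the generator): registering a display event
  // and reading a surface counter with the enhanced-mode wrappers above. Assumes the default
  // `vk` namespace and that `device` (vk::Device), `display` (vk::DisplayKHR) and `swapchain`
  // (vk::SwapchainKHR) are valid handles obtained elsewhere.
  //
  //   vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
  //   vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );
  //   vk::Result waitResult = device.waitForFences( fence, VK_TRUE, UINT64_MAX );
  //   uint64_t vblankCount = device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );
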
  //=== VK_EXT_discard_rectangles ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
  }

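  // Usage sketch (editorial note, not emitted by the generator): the ArrayProxy overload of
  // setDiscardRectangleEXT accepts a single element directly. Assumes `commandBuffer` is a
  // vk::CommandBuffer in the recording state and the bound pipeline enables the
  // discard-rectangle dynamic state.
  //
  //   vk::Rect2D rect( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ) );
  //   commandBuffer.setDiscardRectangleEXT( 0, rect );
  //   commandBuffer.setDiscardRectangleEnableEXT( VK_TRUE );
  //   commandBuffer.setDiscardRectangleModeEXT( vk::DiscardRectangleModeEXT::eInclusive );
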
  //=== VK_EXT_hdr_metadata ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
#else
    if ( swapchains.size() != metadata.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
    }
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkSetHdrMetadataEXT( m_device, swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

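  // Usage sketch (editorial note, not emitted by the generator): the ArrayProxy overload
  // above requires swapchains.size() == metadata.size(); a single swapchain/metadata pair
  // also works. Assumes `device` and `swapchain` are valid handles for an HDR-capable surface.
  //
  //   vk::HdrMetadataEXT metadata;
  //   metadata.maxLuminance = 1000.0f;
  //   metadata.minLuminance = 0.001f;
  //   device.setHdrMetadataEXT( swapchain, metadata );
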
  //=== VK_KHR_shared_presentable_image ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

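  // Usage sketch (editorial note, not emitted by the generator): in enhanced mode
  // getSwapchainStatusKHR returns vk::Result directly, and both eSuccess and eSuboptimalKHR
  // pass the resultCheck above. Assumes `device` and `swapchain` are valid handles.
  //
  //   vk::Result status = device.getSwapchainStatusKHR( swapchain );
  //   if ( status == vk::Result::eSuboptimalKHR )
  //   {
  //     // recreate the swapchain when convenient
  //   }
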
  //=== VK_KHR_external_fence_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    int fd;
    VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

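  // Usage sketch (editorial note, not emitted by the generator): exporting a fence payload as
  // a sync fd and importing it into another fence. Assumes `device`, `exportableFence` and
  // `importedFence` are valid handles and both fences were created with the sync-fd external
  // handle type.
  //
  //   vk::FenceGetFdInfoKHR getFdInfo( exportableFence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
  //   int fd = device.getFenceFdKHR( getFdInfo );
  //   device.importFenceFdKHR( vk::ImportFenceFdInfoKHR(
  //     importedFence, vk::FenceImportFlagBits::eTemporary, vk::ExternalFenceHandleTypeFlagBits::eSyncFd, fd ) );
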
  //=== VK_KHR_performance_query ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t * pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data_;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t counterCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
      if ( ( result == VK_SUCCESS ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }

  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch, typename B1, typename B2, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data_( std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t counterCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
      if ( ( result == VK_SUCCESS ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    uint32_t numPasses;
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );

    return numPasses;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkReleaseProfilingLockKHR( m_device );
  }

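  // Usage sketch (editorial note, not emitted by the generator): enumerating the counters of
  // queue family 0, querying the number of passes, and bracketing the work with the profiling
  // lock. Assumes `physicalDevice` and `device` are valid handles; <numeric> provides std::iota.
  //
  //   auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( 0 );
  //   std::vector<uint32_t> counterIndices( counters.size() );
  //   std::iota( counterIndices.begin(), counterIndices.end(), 0u );
  //   vk::QueryPoolPerformanceCreateInfoKHR perfCreateInfo( 0, static_cast<uint32_t>( counterIndices.size() ), counterIndices.data() );
  //   uint32_t numPasses = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( perfCreateInfo );
  //   device.acquireProfilingLockKHR( vk::AcquireProfilingLockInfoKHR( {}, UINT64_MAX ) );
  //   // ... record and submit the performance-query workload once per required pass ...
  //   device.releaseProfilingLockKHR();
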
  //=== VK_KHR_get_surface_capabilities2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }

  template <typename SurfaceFormat2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator> structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

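  // Usage sketch (editorial note, not emitted by the generator): querying extended surface
  // capabilities and formats. Assumes `physicalDevice` is a vk::PhysicalDevice and `surface`
  // a vk::SurfaceKHR.
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //   vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  //   std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
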
  //=== VK_KHR_get_display_properties2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename DisplayProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
    VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

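  // Usage sketch (editorial note, not emitted by the generator): walking the extended display
  // topology. Assumes `physicalDevice` is a valid vk::PhysicalDevice.
  //
  //   std::vector<vk::DisplayProperties2KHR> displays = physicalDevice.getDisplayProperties2KHR();
  //   std::vector<vk::DisplayPlaneProperties2KHR> planes = physicalDevice.getDisplayPlaneProperties2KHR();
  //   for ( auto const & dp : displays )
  //   {
  //     auto modes = physicalDevice.getDisplayModeProperties2KHR( dp.displayProperties.display );
  //   }
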
7488   //=== VK_EXT_debug_utils ===
7489 
7490 
7491   template <typename Dispatch>
setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,Dispatch const & d) const7492   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7493   {
7494     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7495     return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
7496   }
7497 
7498 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7499   template <typename Dispatch>
7500   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
7501   {
7502     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7503 
7504 
7505 
7506     VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
7507     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
7508 
7509     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
7510   }
7511 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
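  // Usage sketch (illustrative, not part of the generated header): giving a buffer a
  // human-readable name for validation layers and debuggers. Assumes `device` is a valid
  // vk::Device, `buffer` is a valid vk::Buffer with 64-bit handles, VK_EXT_debug_utils was
  // enabled on the instance, and the dispatcher has loaded vkSetDebugUtilsObjectNameEXT.
  //
  //   vk::DebugUtilsObjectNameInfoEXT nameInfo( vk::ObjectType::eBuffer,
  //                                             reinterpret_cast<uint64_t>( static_cast<VkBuffer>( buffer ) ),
  //                                             "vertex buffer" );
  //   device.setDebugUtilsObjectNameEXT( nameInfo );  // throws vk::SystemError on failure in enhanced mode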
7512 
7513 
7514   template <typename Dispatch>
7515   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7516   {
7517     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7518     return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
7519   }
7520 
7521 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7522   template <typename Dispatch>
7523   VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
7524   {
7525     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7526 
7527 
7528 
7529     VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
7530     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
7531 
7532     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
7533   }
7534 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7535 
7536 
7537   template <typename Dispatch>
7538   VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7539   {
7540     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7541     d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
7542   }
7543 
7544 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7545   template <typename Dispatch>
7546   VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7547   {
7548     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7549 
7550 
7551 
7552     d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
7553 
7554 
7555 
7556   }
7557 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7558 
7559 
7560   template <typename Dispatch>
7561   VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7562   {
7563     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7564     d.vkQueueEndDebugUtilsLabelEXT( m_queue );
7565   }
7566 
7567 
7568   template <typename Dispatch>
7569   VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7570   {
7571     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7572     d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
7573   }
7574 
7575 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7576   template <typename Dispatch>
7577   VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7578   {
7579     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7580 
7581 
7582 
7583     d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
7584 
7585 
7586 
7587   }
7588 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7589 
7590 
7591   template <typename Dispatch>
7592   VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7593   {
7594     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7595     d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
7596   }
7597 
7598 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7599   template <typename Dispatch>
7600   VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7601   {
7602     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7603 
7604 
7605 
7606     d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
7607 
7608 
7609 
7610   }
7611 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7612 
7613 
7614   template <typename Dispatch>
7615   VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7616   {
7617     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7618     d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
7619   }
7620 
7621 
7622   template <typename Dispatch>
7623   VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7624   {
7625     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7626     d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
7627   }
7628 
7629 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7630   template <typename Dispatch>
7631   VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7632   {
7633     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7634 
7635 
7636 
7637     d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
7638 
7639 
7640 
7641   }
7642 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
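  // Usage sketch (illustrative, not part of the generated header): wrapping a group of commands
  // in a named, colored region so it shows up in tools such as RenderDoc. Assumes `commandBuffer`
  // is a vk::CommandBuffer in the recording state and VK_EXT_debug_utils is enabled.
  //
  //   vk::DebugUtilsLabelEXT label( "shadow pass", { { 0.2f, 0.2f, 0.2f, 1.0f } } );
  //   commandBuffer.beginDebugUtilsLabelEXT( label );
  //   // ... record the commands belonging to the pass ...
  //   commandBuffer.endDebugUtilsLabelEXT();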
7643 
7644 
7645   template <typename Dispatch>
7646   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7647   {
7648     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7649     return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
7650   }
7651 
7652 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7653   template <typename Dispatch>
7654   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
7655   {
7656     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7657 
7658 
7659     VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
7660     VkResult result = d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
7661     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
7662 
7663     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), messenger );
7664   }
7665 
7666 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
7667   template <typename Dispatch>
7668   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
7669   {
7670     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7671 
7672 
7673     VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
7674     VkResult result = d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
7675     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
7676 
7677     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
7678   }
7679 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
7680 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
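  // Usage sketch (illustrative, not part of the generated header): installing a debug messenger
  // with a user callback. Assumes the instance was created with VK_EXT_debug_utils enabled, the
  // dispatcher has loaded the extension entry points, smart handles are enabled, and
  // `debugCallback` is an application-provided function with the
  // PFN_vkDebugUtilsMessengerCallbackEXT signature.
  //
  //   vk::DebugUtilsMessengerCreateInfoEXT createInfo(
  //     {},
  //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
  //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
  //     &debugCallback );
  //   vk::UniqueDebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo );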
7681 
7682 
7683   template <typename Dispatch>
7684   VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7685   {
7686     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7687     d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7688   }
7689 
7690 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7691   template <typename Dispatch>
7692   VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7693   {
7694     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7695 
7696 
7697 
7698     d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7699 
7700 
7701 
7702   }
7703 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7704 
7705 
7706   template <typename Dispatch>
7707   VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7708   {
7709     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7710     d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7711   }
7712 
7713 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7714   template <typename Dispatch>
7715   VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7716   {
7717     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7718 
7719 
7720 
7721     d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7722 
7723 
7724 
7725   }
7726 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7727 
7728 
7729   template <typename Dispatch>
7730   VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7731   {
7732     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7733     d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
7734   }
7735 
7736 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7737   template <typename Dispatch>
7738   VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7739   {
7740     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7741 
7742 
7743 
7744     d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
7745 
7746 
7747 
7748   }
7749 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7750 
7751   //=== VK_EXT_sample_locations ===
7752 
7753 
7754   template <typename Dispatch>
7755   VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7756   {
7757     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7758     d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
7759   }
7760 
7761 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7762   template <typename Dispatch>
7763   VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7764   {
7765     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7766 
7767 
7768 
7769     d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
7770 
7771 
7772 
7773   }
7774 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7775 
7776 
7777   template <typename Dispatch>
7778   VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7779   {
7780     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7781     d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
7782   }
7783 
7784 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7785   template <typename Dispatch>
7786   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
7787   {
7788     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7789 
7790 
7791     VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
7792     d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
7793 
7794 
7795     return multisampleProperties;
7796   }
7797 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
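  // Usage sketch (illustrative): querying the sample-location grid supported for 4x MSAA.
  // Assumes a valid vk::PhysicalDevice and that VK_EXT_sample_locations is supported.
  //
  //   vk::MultisamplePropertiesEXT props = physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
  //   vk::Extent2D gridSize = props.maxSampleLocationGridSize;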
7798 
7799   //=== VK_EXT_image_drm_format_modifier ===
7800 
7801 
7802   template <typename Dispatch>
7803   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7804   {
7805     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7806     return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
7807   }
7808 
7809 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7810   template <typename Dispatch>
7811   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
7812   {
7813     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7814 
7815 
7816     VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
7817     VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
7818     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
7819 
7820     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
7821   }
7822 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
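  // Usage sketch (illustrative): reading back the DRM format modifier chosen for an image that
  // was created with DRM-format-modifier tiling. Assumes a valid vk::Device and vk::Image; in
  // enhanced mode the call throws vk::SystemError on failure.
  //
  //   vk::ImageDrmFormatModifierPropertiesEXT props = device.getImageDrmFormatModifierPropertiesEXT( image );
  //   uint64_t modifier = props.drmFormatModifier;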
7823 
7824   //=== VK_EXT_external_memory_host ===
7825 
7826 
7827   template <typename Dispatch>
7828   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7829   {
7830     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7831     return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
7832   }
7833 
7834 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7835   template <typename Dispatch>
7836   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, Dispatch const & d ) const
7837   {
7838     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7839 
7840 
7841     VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
7842     VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
7843     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
7844 
7845     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
7846   }
7847 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
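  // Usage sketch (illustrative): asking which memory types can import a host allocation.
  // Assumes `hostPtr` meets the device's minImportedHostPointerAlignment and that
  // VK_EXT_external_memory_host is enabled on the device.
  //
  //   vk::MemoryHostPointerPropertiesEXT props =
  //     device.getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
  //   uint32_t allowedMemoryTypes = props.memoryTypeBits;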
7848 
7849   //=== VK_EXT_calibrated_timestamps ===
7850 
7851 
7852   template <typename Dispatch>
7853   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7854   {
7855     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7856     return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
7857   }
7858 
7859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7860   template <typename TimeDomainEXTAllocator, typename Dispatch>
7861   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
7862   {
7863     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7864 
7865 
7866     std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
7867     uint32_t timeDomainCount;
7868     VkResult result;
7869     do
7870     {
7871       result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
7872       if ( ( result == VK_SUCCESS ) && timeDomainCount )
7873       {
7874         timeDomains.resize( timeDomainCount );
7875         result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
7876       }
7877     } while ( result == VK_INCOMPLETE );
7878     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
7879     VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
7880     if ( timeDomainCount < timeDomains.size() )
7881     {
7882       timeDomains.resize( timeDomainCount );
7883     }
7884     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
7885   }
7886 
7887   template <typename TimeDomainEXTAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type>
7888   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
7889   {
7890     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7891 
7892 
7893     std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
7894     uint32_t timeDomainCount;
7895     VkResult result;
7896     do
7897     {
7898       result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
7899       if ( ( result == VK_SUCCESS ) && timeDomainCount )
7900       {
7901         timeDomains.resize( timeDomainCount );
7902         result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
7903       }
7904     } while ( result == VK_INCOMPLETE );
7905     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
7906     VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
7907     if ( timeDomainCount < timeDomains.size() )
7908     {
7909       timeDomains.resize( timeDomainCount );
7910     }
7911     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
7912   }
7913 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
7914 
7915 
7916   template <typename Dispatch>
7917   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, uint64_t * pTimestamps, uint64_t * pMaxDeviation, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7918   {
7919     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7920     return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
7921   }
7922 
7923 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7924   template <typename Uint64_tAllocator, typename Dispatch>
7925   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d ) const
7926   {
7927     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7928 
7929 
7930     std::pair<std::vector<uint64_t, Uint64_tAllocator>,uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
7931     std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
7932     uint64_t & maxDeviation = data_.second;
7933     VkResult result = d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
7934     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
7935 
7936     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
7937   }
7938 
7939   template <typename Uint64_tAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type>
7940   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Uint64_tAllocator & uint64_tAllocator, Dispatch const & d ) const
7941   {
7942     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7943 
7944 
7945     std::pair<std::vector<uint64_t, Uint64_tAllocator>,uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
7946     std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
7947     uint64_t & maxDeviation = data_.second;
7948     VkResult result = d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
7949     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
7950 
7951     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
7952   }
7953 
7954   template <typename Dispatch>
7955   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d ) const
7956   {
7957     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7958 
7959 
7960     std::pair<uint64_t,uint64_t> data_;
7961     uint64_t & timestamp = data_.first;
7962     uint64_t & maxDeviation = data_.second;
7963     VkResult result = d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
7964     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
7965 
7966     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
7967   }
7968 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
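  // Usage sketch (illustrative): sampling the device and host clocks together. Assumes
  // VK_EXT_calibrated_timestamps is enabled and that both time domains were reported by
  // getCalibrateableTimeDomainsEXT; with exceptions enabled the wrapper returns the pair directly.
  //
  //   std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
  //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
  //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic )
  //   };
  //   auto [ timestamps, maxDeviation ] = device.getCalibratedTimestampsEXT( infos );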
7969 
7970   //=== VK_KHR_fragment_shading_rate ===
7971 
7972 
7973   template <typename Dispatch>
7974   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
7975   {
7976     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7977     return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
7978   }
7979 
7980 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7981   template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
7982   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
7983   {
7984     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
7985 
7986 
7987     std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
7988     uint32_t fragmentShadingRateCount;
7989     VkResult result;
7990     do
7991     {
7992       result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
7993       if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
7994       {
7995         fragmentShadingRates.resize( fragmentShadingRateCount );
7996         result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
7997       }
7998     } while ( result == VK_INCOMPLETE );
7999     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
8000     VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
8001     if ( fragmentShadingRateCount < fragmentShadingRates.size() )
8002     {
8003       fragmentShadingRates.resize( fragmentShadingRateCount );
8004     }
8005     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
8006   }
8007 
8008   template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
8009   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, Dispatch const & d ) const
8010   {
8011     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8012 
8013 
8014     std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
8015     uint32_t fragmentShadingRateCount;
8016     VkResult result;
8017     do
8018     {
8019       result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
8020       if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
8021       {
8022         fragmentShadingRates.resize( fragmentShadingRateCount );
8023         result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
8024       }
8025     } while ( result == VK_INCOMPLETE );
8026     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
8027     VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
8028     if ( fragmentShadingRateCount < fragmentShadingRates.size() )
8029     {
8030       fragmentShadingRates.resize( fragmentShadingRateCount );
8031     }
8032     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
8033   }
8034 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8035 
8036 
8037   template <typename Dispatch>
8038   VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8039   {
8040     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8041     d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
8042   }
8043 
8044 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8045   template <typename Dispatch>
8046   VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8047   {
8048     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8049 
8050 
8051 
8052     d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
8053 
8054 
8055 
8056   }
8057 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
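  // Usage sketch (illustrative): requesting a 2x2 per-draw shading rate with both combiners left
  // at "keep". Note that the combiner operations are passed as a plain two-element array. Assumes
  // the command buffer is recording and the pipelineFragmentShadingRate feature is enabled.
  //
  //   vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
  //                                                           vk::FragmentShadingRateCombinerOpKHR::eKeep };
  //   commandBuffer.setFragmentShadingRateKHR( vk::Extent2D( 2, 2 ), combinerOps );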
8058 
8059   //=== VK_EXT_headless_surface ===
8060 
8061 
8062   template <typename Dispatch>
8063   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8064   {
8065     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8066     return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8067   }
8068 
8069 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8070   template <typename Dispatch>
8071   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
8072   {
8073     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8074 
8075 
8076     VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
8077     VkResult result = d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8078     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
8079 
8080     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
8081   }
8082 
8083 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
8084   template <typename Dispatch>
8085   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
8086   {
8087     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8088 
8089 
8090     VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
8091     VkResult result = d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8092     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
8093 
8094     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
8095   }
8096 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
8097 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
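  // Usage sketch (illustrative): creating a headless surface, e.g. for automated testing without
  // a window system. Assumes VK_EXT_headless_surface is enabled on the instance; the unique
  // variant requires smart handles.
  //
  //   vk::UniqueSurfaceKHR surface = instance.createHeadlessSurfaceEXTUnique( vk::HeadlessSurfaceCreateInfoEXT() );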
8098 
8099   //=== VK_EXT_line_rasterization ===
8100 
8101 
8102   template <typename Dispatch>
8103   VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8104   {
8105     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8106     d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
8107   }
8108 
8109   //=== VK_EXT_extended_dynamic_state ===
8110 
8111 
8112   template <typename Dispatch>
8113   VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8114   {
8115     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8116     d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
8117   }
8118 
8119 
8120   template <typename Dispatch>
8121   VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8122   {
8123     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8124     d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
8125   }
8126 
8127 
8128   template <typename Dispatch>
8129   VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8130   {
8131     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8132     d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
8133   }
8134 
8135 
8136   template <typename Dispatch>
8137   VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8138   {
8139     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8140     d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
8141   }
8142 
8143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8144   template <typename Dispatch>
8145   VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8146   {
8147     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8148 
8149 
8150 
8151     d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
8152 
8153 
8154 
8155   }
8156 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8157 
8158 
8159   template <typename Dispatch>
8160   VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8161   {
8162     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8163     d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
8164   }
8165 
8166 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8167   template <typename Dispatch>
8168   VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8169   {
8170     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8171 
8172 
8173 
8174     d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
8175 
8176 
8177 
8178   }
8179 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8180 
8181 
8182   template <typename Dispatch>
8183   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8184   {
8185     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8186     d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) );
8187   }
8188 
8189 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8190   template <typename Dispatch>
8191   VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
8192   {
8193     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8194 #ifdef VULKAN_HPP_NO_EXCEPTIONS
8195     VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
8196     VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
8197     VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
8198 #else
8199     if ( buffers.size() != offsets.size() )
8200     {
8201       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
8202     }
8203     if ( !sizes.empty() && buffers.size() != sizes.size() )
8204     {
8205       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
8206     }
8207     if ( !strides.empty() && buffers.size() != strides.size() )
8208     {
8209       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
8210     }
8211 #endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
8212 
8213 
8214     d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
8215 
8216 
8217 
8218   }
8219 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
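  // Usage sketch (illustrative): binding two hypothetical vertex buffers (positionBuffer,
  // normalBuffer) with per-binding offsets while letting the strides come from the pipeline;
  // empty `sizes`/`strides` proxies are forwarded as null pointers. The ArrayProxy overload
  // above throws vk::LogicError (or asserts when exceptions are disabled) if a non-empty array
  // does not match buffers.size().
  //
  //   std::array<vk::Buffer, 2>     buffers = { positionBuffer, normalBuffer };
  //   std::array<vk::DeviceSize, 2> offsets = { 0, 0 };
  //   commandBuffer.bindVertexBuffers2EXT( 0, buffers, offsets, {}, {} );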
8220 
8221 
8222   template <typename Dispatch>
8223   VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8224   {
8225     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8226     d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
8227   }
8228 
8229 
8230   template <typename Dispatch>
8231   VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8232   {
8233     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8234     d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
8235   }
8236 
8237 
8238   template <typename Dispatch>
8239   VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8240   {
8241     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8242     d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
8243   }
8244 
8245 
8246   template <typename Dispatch>
8247   VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8248   {
8249     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8250     d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
8251   }
8252 
8253 
8254   template <typename Dispatch>
8255   VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8256   {
8257     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8258     d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
8259   }
8260 
8261 
8262   template <typename Dispatch>
8263   VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8264   {
8265     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8266     d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
8267   }
8268 
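  // Usage sketch (illustrative): the VK_EXT_extended_dynamic_state setters above are typically
  // used together while recording, after binding a pipeline created with the matching dynamic
  // states. Assumes the extendedDynamicState feature is enabled on the device.
  //
  //   commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
  //   commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
  //   commandBuffer.setDepthTestEnableEXT( VK_TRUE );
  //   commandBuffer.setDepthWriteEnableEXT( VK_TRUE );
  //   commandBuffer.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );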
8269   //=== VK_KHR_object_refresh ===
8270 
8271 
8272   template <typename Dispatch>
8273   VULKAN_HPP_INLINE void CommandBuffer::refreshObjectsKHR( const VULKAN_HPP_NAMESPACE::RefreshObjectListKHR * pRefreshObjects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
8274   {
8275     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8276     d.vkCmdRefreshObjectsKHR( m_commandBuffer, reinterpret_cast<const VkRefreshObjectListKHR *>( pRefreshObjects ) );
8277   }
8278 
8279 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8280   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::refreshObjectsKHR( const VULKAN_HPP_NAMESPACE::RefreshObjectListKHR & refreshObjects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8282   {
8283     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8284 
8285 
8286 
8287     d.vkCmdRefreshObjectsKHR( m_commandBuffer, reinterpret_cast<const VkRefreshObjectListKHR *>( &refreshObjects ) );
8288 
8289 
8290 
8291   }
8292 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8293 
8294 
8295   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRefreshableObjectTypesKHR( uint32_t * pRefreshableObjectTypeCount, VULKAN_HPP_NAMESPACE::ObjectType * pRefreshableObjectTypes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8297   {
8298     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8299     return static_cast<Result>( d.vkGetPhysicalDeviceRefreshableObjectTypesKHR( m_physicalDevice, pRefreshableObjectTypeCount, reinterpret_cast<VkObjectType *>( pRefreshableObjectTypes ) ) );
8300   }
8301 
8302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8303   template <typename ObjectTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ObjectType, ObjectTypeAllocator>>::type PhysicalDevice::getRefreshableObjectTypesKHR( Dispatch const & d ) const
8305   {
8306     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8307 
8308 
8309     std::vector<VULKAN_HPP_NAMESPACE::ObjectType, ObjectTypeAllocator> refreshableObjectTypes;
8310     uint32_t refreshableObjectTypeCount;
8311     VkResult result;
8312     do
8313     {
8314       result = d.vkGetPhysicalDeviceRefreshableObjectTypesKHR( m_physicalDevice, &refreshableObjectTypeCount, nullptr );
8315       if ( ( result == VK_SUCCESS ) && refreshableObjectTypeCount )
8316       {
8317         refreshableObjectTypes.resize( refreshableObjectTypeCount );
8318         result = d.vkGetPhysicalDeviceRefreshableObjectTypesKHR( m_physicalDevice, &refreshableObjectTypeCount, reinterpret_cast<VkObjectType *>( refreshableObjectTypes.data() ) );
8319       }
8320     } while ( result == VK_INCOMPLETE );

    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRefreshableObjectTypesKHR" );
    VULKAN_HPP_ASSERT( refreshableObjectTypeCount <= refreshableObjectTypes.size() );
    if ( refreshableObjectTypeCount < refreshableObjectTypes.size() )
    {
      refreshableObjectTypes.resize( refreshableObjectTypeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), refreshableObjectTypes );
8328   }
8329 
8330   template <typename ObjectTypeAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ObjectType>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ObjectType, ObjectTypeAllocator>>::type PhysicalDevice::getRefreshableObjectTypesKHR( ObjectTypeAllocator & objectTypeAllocator, Dispatch const & d ) const
8332   {
8333     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8334 
8335 
8336     std::vector<VULKAN_HPP_NAMESPACE::ObjectType, ObjectTypeAllocator> refreshableObjectTypes( objectTypeAllocator );
8337     uint32_t refreshableObjectTypeCount;
8338     VkResult result;
8339     do
8340     {
8341       result = d.vkGetPhysicalDeviceRefreshableObjectTypesKHR( m_physicalDevice, &refreshableObjectTypeCount, nullptr );
8342       if ( ( result == VK_SUCCESS ) && refreshableObjectTypeCount )
8343       {
8344         refreshableObjectTypes.resize( refreshableObjectTypeCount );
8345         result = d.vkGetPhysicalDeviceRefreshableObjectTypesKHR( m_physicalDevice, &refreshableObjectTypeCount, reinterpret_cast<VkObjectType *>( refreshableObjectTypes.data() ) );
8346       }
8347     } while ( result == VK_INCOMPLETE );

    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRefreshableObjectTypesKHR" );
    VULKAN_HPP_ASSERT( refreshableObjectTypeCount <= refreshableObjectTypes.size() );
    if ( refreshableObjectTypeCount < refreshableObjectTypes.size() )
    {
      refreshableObjectTypes.resize( refreshableObjectTypeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), refreshableObjectTypes );
8355   }
8356 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
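
  // Usage sketch (illustrative comment, not generated code): query the refreshable object types and record a
  // refresh of a single object; "physicalDevice", "commandBuffer" and "objectHandle" (the raw uint64_t handle
  // value) are assumed to exist in the calling code, and the struct constructor argument order is assumed to
  // follow the member order (objectType, objectHandle).
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::ObjectType> refreshable = physicalDevice.getRefreshableObjectTypesKHR();
  //   VULKAN_HPP_NAMESPACE::RefreshObjectKHR     object( VULKAN_HPP_NAMESPACE::ObjectType::eImage, objectHandle );
  //   VULKAN_HPP_NAMESPACE::RefreshObjectListKHR objectList( 1, &object );
  //   commandBuffer.refreshObjectsKHR( objectList );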
8357 
8358   //=== VK_KHR_synchronization2 ===
8359 
8360 
8361   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8363   {
8364     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8365     d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
8366   }
8367 
8368 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8369   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8371   {
8372     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8373 
8374 
8375 
8376     d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
8377 
8378 
8379 
8380   }
8381 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8382 
8383 
8384   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8386   {
8387     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8388     d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
8389   }
8390 
8391 
8392   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8394   {
8395     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8396     d.vkCmdWaitEvents2KHR( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
8397   }
8398 
8399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8400   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
8402   {
8403     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8404 #ifdef VULKAN_HPP_NO_EXCEPTIONS
8405     VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
8406 #else
    if ( events.size() != dependencyInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
    }
8411 #endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
8412 
8413 
8414     d.vkCmdWaitEvents2KHR( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
8415 
8416 
8417 
8418   }
8419 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8420 
8421 
8422   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8424   {
8425     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8426     d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
8427   }
8428 
8429 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8430   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8432   {
8433     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8434 
8435 
8436 
8437     d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
8438 
8439 
8440 
8441   }
8442 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8443 
8444 
8445   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8447   {
8448     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8449     d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
8450   }
8451 
8452 
8453   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8455   {
8456     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8457     return static_cast<Result>( d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
8458   }
8459 
8460 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8461   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
8463   {
8464     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8465 
8466 
8467 
8468     VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
8469     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
8470 
8471     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
8472   }
8473 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8474 
8475 
8476   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8478   {
8479     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8480     d.vkCmdWriteBufferMarker2AMD( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
8481   }
8482 
8483 
8484   template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8486   {
8487     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8488     d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
8489   }
8490 
8491 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8492   template <typename CheckpointData2NVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( Dispatch const & d ) const
8494   {
8495     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8496 
8497 
8498     std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
8499     uint32_t checkpointDataCount;
8500     d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
8501     checkpointData.resize( checkpointDataCount );
8502     d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
8503 
8504     VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
8505     if ( checkpointDataCount < checkpointData.size() )
8506     {
8507       checkpointData.resize( checkpointDataCount );
8508     }
8509     return checkpointData;
8510   }
8511 
8512   template <typename CheckpointData2NVAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
8514   {
8515     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8516 
8517 
8518     std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
8519     uint32_t checkpointDataCount;
8520     d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
8521     checkpointData.resize( checkpointDataCount );
8522     d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
8523 
8524     VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
8525     if ( checkpointDataCount < checkpointData.size() )
8526     {
8527       checkpointData.resize( checkpointDataCount );
8528     }
8529     return checkpointData;
8530   }
8531 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
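
  // Usage sketch (illustrative comment, not generated code): record a global memory barrier and submit the
  // command buffer through the synchronization2 entry points above; "commandBuffer", "queue" and "fence" are
  // assumed, valid handles, and the struct setters are assumed from the corresponding vulkan_structs.hpp
  // definitions.
  //
  //   VULKAN_HPP_NAMESPACE::MemoryBarrier2 barrier( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eTransfer,
  //                                                 VULKAN_HPP_NAMESPACE::AccessFlagBits2::eTransferWrite,
  //                                                 VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eFragmentShader,
  //                                                 VULKAN_HPP_NAMESPACE::AccessFlagBits2::eShaderRead );
  //   VULKAN_HPP_NAMESPACE::DependencyInfo dependencyInfo;
  //   dependencyInfo.setMemoryBarriers( barrier );
  //   commandBuffer.pipelineBarrier2KHR( dependencyInfo );
  //
  //   VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo commandBufferInfo( commandBuffer );
  //   VULKAN_HPP_NAMESPACE::SubmitInfo2             submitInfo;
  //   submitInfo.setCommandBufferInfos( commandBufferInfo );
  //   queue.submit2KHR( submitInfo, fence );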
8532 
8533   //=== VK_KHR_copy_commands2 ===
8534 
8535 
8536   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8538   {
8539     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8540     d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
8541   }
8542 
8543 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8544   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8546   {
8547     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8548 
8549 
8550 
8551     d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
8552 
8553 
8554 
8555   }
8556 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8557 
8558 
8559   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8561   {
8562     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8563     d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
8564   }
8565 
8566 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8567   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8569   {
8570     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8571 
8572 
8573 
8574     d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
8575 
8576 
8577 
8578   }
8579 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8580 
8581 
8582   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8584   {
8585     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8586     d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
8587   }
8588 
8589 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8590   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8592   {
8593     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8594 
8595 
8596 
8597     d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
8598 
8599 
8600 
8601   }
8602 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8603 
8604 
8605   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8607   {
8608     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8609     d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
8610   }
8611 
8612 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8613   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8615   {
8616     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8617 
8618 
8619 
8620     d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
8621 
8622 
8623 
8624   }
8625 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8626 
8627 
8628   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8630   {
8631     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8632     d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
8633   }
8634 
8635 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8636   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8638   {
8639     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8640 
8641 
8642 
8643     d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
8644 
8645 
8646 
8647   }
8648 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8649 
8650 
8651   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8653   {
8654     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8655     d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
8656   }
8657 
8658 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8659   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8661   {
8662     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8663 
8664 
8665 
8666     d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
8667 
8668 
8669 
8670   }
8671 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
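
  // Usage sketch (illustrative comment, not generated code): copy a buffer range with the "2" variants above;
  // "commandBuffer", "srcBuffer", "dstBuffer" and "byteCount" are assumed to be provided by the caller, and the
  // ArrayProxy-style struct constructor is the assumed enhanced-mode one from vulkan_structs.hpp.
  //
  //   VULKAN_HPP_NAMESPACE::BufferCopy2     region( 0 /*srcOffset*/, 0 /*dstOffset*/, byteCount );
  //   VULKAN_HPP_NAMESPACE::CopyBufferInfo2 copyInfo( srcBuffer, dstBuffer, region );
  //   commandBuffer.copyBuffer2KHR( copyInfo );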
8672 
8673   //=== VK_EXT_vertex_input_dynamic_state ===
8674 
8675 
8676   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8678   {
8679     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8680     d.vkCmdSetVertexInputEXT( m_commandBuffer, vertexBindingDescriptionCount, reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ), vertexAttributeDescriptionCount, reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
8681   }
8682 
8683 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8684   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8686   {
8687     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8688 
8689 
8690 
8691     d.vkCmdSetVertexInputEXT( m_commandBuffer, vertexBindingDescriptions.size(), reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ), vertexAttributeDescriptions.size(), reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
8692 
8693 
8694 
8695   }
8696 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
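
  // Usage sketch (illustrative comment, not generated code): set the vertex input layout dynamically;
  // "commandBuffer" is an assumed, valid command buffer handle and "Vertex" an assumed application vertex
  // struct. Single elements convert implicitly to the ArrayProxy parameters.
  //
  //   VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT   binding( 0, sizeof( Vertex ), VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, 1 );
  //   VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT attribute( 0, 0, VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat, 0 );
  //   commandBuffer.setVertexInputEXT( binding, attribute );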
8697 
8698 #if defined( VK_USE_PLATFORM_SCI )
8699   //=== VK_NV_external_sci_sync ===
8700 
8701 
8702   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceSciSyncFenceNV( const VULKAN_HPP_NAMESPACE::FenceGetSciSyncInfoNV * pGetSciSyncHandleInfo, void * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8704   {
8705     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8706     return static_cast<Result>( d.vkGetFenceSciSyncFenceNV( m_device, reinterpret_cast<const VkFenceGetSciSyncInfoNV *>( pGetSciSyncHandleInfo ), pHandle ) );
8707   }
8708 
8709 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8710   template <typename HandleType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HandleType>::type Device::getFenceSciSyncFenceNV( const VULKAN_HPP_NAMESPACE::FenceGetSciSyncInfoNV & getSciSyncHandleInfo, Dispatch const & d ) const
8712   {
8713     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8714 
8715 
8716     HandleType handle;
8717     VkResult result = d.vkGetFenceSciSyncFenceNV( m_device, reinterpret_cast<const VkFenceGetSciSyncInfoNV *>( &getSciSyncHandleInfo ), &handle );
8718     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceSciSyncFenceNV" );
8719 
8720     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
8721   }
8722 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8723 
8724 
8725   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceSciSyncObjNV( const VULKAN_HPP_NAMESPACE::FenceGetSciSyncInfoNV * pGetSciSyncHandleInfo, void * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8727   {
8728     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8729     return static_cast<Result>( d.vkGetFenceSciSyncObjNV( m_device, reinterpret_cast<const VkFenceGetSciSyncInfoNV *>( pGetSciSyncHandleInfo ), pHandle ) );
8730   }
8731 
8732 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8733   template <typename HandleType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HandleType>::type Device::getFenceSciSyncObjNV( const VULKAN_HPP_NAMESPACE::FenceGetSciSyncInfoNV & getSciSyncHandleInfo, Dispatch const & d ) const
8735   {
8736     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8737 
8738 
8739     HandleType handle;
8740     VkResult result = d.vkGetFenceSciSyncObjNV( m_device, reinterpret_cast<const VkFenceGetSciSyncInfoNV *>( &getSciSyncHandleInfo ), &handle );
8741     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceSciSyncObjNV" );
8742 
8743     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
8744   }
8745 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8746 
8747 
8748   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceSciSyncFenceNV( const VULKAN_HPP_NAMESPACE::ImportFenceSciSyncInfoNV * pImportFenceSciSyncInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8750   {
8751     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8752     return static_cast<Result>( d.vkImportFenceSciSyncFenceNV( m_device, reinterpret_cast<const VkImportFenceSciSyncInfoNV *>( pImportFenceSciSyncInfo ) ) );
8753   }
8754 
8755 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8756   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceSciSyncFenceNV( const VULKAN_HPP_NAMESPACE::ImportFenceSciSyncInfoNV & importFenceSciSyncInfo, Dispatch const & d ) const
8758   {
8759     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8760 
8761 
8762 
8763     VkResult result = d.vkImportFenceSciSyncFenceNV( m_device, reinterpret_cast<const VkImportFenceSciSyncInfoNV *>( &importFenceSciSyncInfo ) );
8764     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceSciSyncFenceNV" );
8765 
8766     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
8767   }
8768 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8769 
8770 
8771   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceSciSyncObjNV( const VULKAN_HPP_NAMESPACE::ImportFenceSciSyncInfoNV * pImportFenceSciSyncInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8773   {
8774     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8775     return static_cast<Result>( d.vkImportFenceSciSyncObjNV( m_device, reinterpret_cast<const VkImportFenceSciSyncInfoNV *>( pImportFenceSciSyncInfo ) ) );
8776   }
8777 
8778 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8779   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceSciSyncObjNV( const VULKAN_HPP_NAMESPACE::ImportFenceSciSyncInfoNV & importFenceSciSyncInfo, Dispatch const & d ) const
8781   {
8782     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8783 
8784 
8785 
8786     VkResult result = d.vkImportFenceSciSyncObjNV( m_device, reinterpret_cast<const VkImportFenceSciSyncInfoNV *>( &importFenceSciSyncInfo ) );
8787     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceSciSyncObjNV" );
8788 
8789     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
8790   }
8791 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8792 
8793 
8794   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSciSyncAttributesNV( const VULKAN_HPP_NAMESPACE::SciSyncAttributesInfoNV * pSciSyncAttributesInfo, NvSciSyncAttrList pAttributes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8796   {
8797     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8798     return static_cast<Result>( d.vkGetPhysicalDeviceSciSyncAttributesNV( m_physicalDevice, reinterpret_cast<const VkSciSyncAttributesInfoNV *>( pSciSyncAttributesInfo ), pAttributes ) );
8799   }
8800 
8801 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8802   template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::getSciSyncAttributesNV( const VULKAN_HPP_NAMESPACE::SciSyncAttributesInfoNV & sciSyncAttributesInfo, NvSciSyncAttrList pAttributes, Dispatch const & d ) const
8804   {
8805     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8806 
8807 
8808 
8809     VkResult result = d.vkGetPhysicalDeviceSciSyncAttributesNV( m_physicalDevice, reinterpret_cast<const VkSciSyncAttributesInfoNV *>( &sciSyncAttributesInfo ), pAttributes );
8810     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSciSyncAttributesNV" );
8811 
8812     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
8813   }
8814 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8815 
8816 
8817   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreSciSyncObjNV( const VULKAN_HPP_NAMESPACE::SemaphoreGetSciSyncInfoNV * pGetSciSyncInfo, void * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8819   {
8820     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8821     return static_cast<Result>( d.vkGetSemaphoreSciSyncObjNV( m_device, reinterpret_cast<const VkSemaphoreGetSciSyncInfoNV *>( pGetSciSyncInfo ), pHandle ) );
8822   }
8823 
8824 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8825   template <typename HandleType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HandleType>::type Device::getSemaphoreSciSyncObjNV( const VULKAN_HPP_NAMESPACE::SemaphoreGetSciSyncInfoNV & getSciSyncInfo, Dispatch const & d ) const
8827   {
8828     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8829 
8830 
8831     HandleType handle;
8832     VkResult result = d.vkGetSemaphoreSciSyncObjNV( m_device, reinterpret_cast<const VkSemaphoreGetSciSyncInfoNV *>( &getSciSyncInfo ), &handle );
8833     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreSciSyncObjNV" );
8834 
8835     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
8836   }
8837 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8838 
8839 
8840   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreSciSyncObjNV( const VULKAN_HPP_NAMESPACE::ImportSemaphoreSciSyncInfoNV * pImportSemaphoreSciSyncInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8842   {
8843     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8844     return static_cast<Result>( d.vkImportSemaphoreSciSyncObjNV( m_device, reinterpret_cast<const VkImportSemaphoreSciSyncInfoNV *>( pImportSemaphoreSciSyncInfo ) ) );
8845   }
8846 
8847 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8848   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreSciSyncObjNV( const VULKAN_HPP_NAMESPACE::ImportSemaphoreSciSyncInfoNV & importSemaphoreSciSyncInfo, Dispatch const & d ) const
8850   {
8851     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8852 
8853 
8854 
8855     VkResult result = d.vkImportSemaphoreSciSyncObjNV( m_device, reinterpret_cast<const VkImportSemaphoreSciSyncInfoNV *>( &importSemaphoreSciSyncInfo ) );
8856     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreSciSyncObjNV" );
8857 
8858     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
8859   }
8860 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8861 #endif /*VK_USE_PLATFORM_SCI*/
8862 
8863 #if defined( VK_USE_PLATFORM_SCI )
8864   //=== VK_NV_external_memory_sci_buf ===
8865 
8866 
8867   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemorySciBufNV( const VULKAN_HPP_NAMESPACE::MemoryGetSciBufInfoNV * pGetSciBufInfo, NvSciBufObj * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8869   {
8870     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8871     return static_cast<Result>( d.vkGetMemorySciBufNV( m_device, reinterpret_cast<const VkMemoryGetSciBufInfoNV *>( pGetSciBufInfo ), pHandle ) );
8872   }
8873 
8874 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8875   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<NvSciBufObj>::type Device::getMemorySciBufNV( const VULKAN_HPP_NAMESPACE::MemoryGetSciBufInfoNV & getSciBufInfo, Dispatch const & d ) const
8877   {
8878     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8879 
8880 
8881     NvSciBufObj handle;
8882     VkResult result = d.vkGetMemorySciBufNV( m_device, reinterpret_cast<const VkMemoryGetSciBufInfoNV *>( &getSciBufInfo ), &handle );
8883     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemorySciBufNV" );
8884 
8885     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
8886   }
8887 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8888 
8889 
8890   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalMemorySciBufPropertiesNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, NvSciBufObj handle, VULKAN_HPP_NAMESPACE::MemorySciBufPropertiesNV * pMemorySciBufProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8892   {
8893     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8894     return static_cast<Result>( d.vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV( m_physicalDevice, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemorySciBufPropertiesNV *>( pMemorySciBufProperties ) ) );
8895   }
8896 
8897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8898   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemorySciBufPropertiesNV>::type PhysicalDevice::getExternalMemorySciBufPropertiesNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, NvSciBufObj handle, Dispatch const & d ) const
8900   {
8901     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8902 
8903 
8904     VULKAN_HPP_NAMESPACE::MemorySciBufPropertiesNV memorySciBufProperties;
8905     VkResult result = d.vkGetPhysicalDeviceExternalMemorySciBufPropertiesNV( m_physicalDevice, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemorySciBufPropertiesNV *>( &memorySciBufProperties ) );
8906     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalMemorySciBufPropertiesNV" );
8907 
8908     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memorySciBufProperties );
8909   }
8910 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
8911 
8912 
8913 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
8914   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSciBufAttributesNV( NvSciBufAttrList pAttributes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8916   {
8917     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8918     return static_cast<Result>( d.vkGetPhysicalDeviceSciBufAttributesNV( m_physicalDevice, pAttributes ) );
8919   }
8920 #else
8921   template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::getSciBufAttributesNV( NvSciBufAttrList pAttributes, Dispatch const & d ) const
8923   {
8924     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8925 
8926 
8927 
8928     VkResult result = d.vkGetPhysicalDeviceSciBufAttributesNV( m_physicalDevice, pAttributes );
8929     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSciBufAttributesNV" );
8930 
8931     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
8932   }
8933 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
8934 #endif /*VK_USE_PLATFORM_SCI*/
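
  // Usage sketch (illustrative comment, not generated code, VK_USE_PLATFORM_SCI builds only): export an
  // NvSciBufObj for previously allocated device memory; "device" and "memory" are assumed handles, and the
  // handle-type enumerant name is assumed from VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCI_BUF_BIT_NV.
  //
  //   VULKAN_HPP_NAMESPACE::MemoryGetSciBufInfoNV getInfo( memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eSciBufNV );
  //   NvSciBufObj sciBuf = device.getMemorySciBufNV( getInfo );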
8935 
8936   //=== VK_EXT_extended_dynamic_state2 ===
8937 
8938 
8939   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8941   {
8942     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8943     d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
8944   }
8945 
8946 
8947   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8949   {
8950     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8951     d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
8952   }
8953 
8954 
8955   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8957   {
8958     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8959     d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
8960   }
8961 
8962 
8963   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8965   {
8966     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8967     d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
8968   }
8969 
8970 
8971   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8973   {
8974     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8975     d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
8976   }
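
  // Usage sketch (illustrative comment, not generated code): the extended dynamic state 2 setters above are
  // recorded the same way as the other dynamic state commands; "commandBuffer" is an assumed, valid command
  // buffer handle in the recording state.
  //
  //   commandBuffer.setRasterizerDiscardEnableEXT( VK_FALSE );
  //   commandBuffer.setDepthBiasEnableEXT( VK_FALSE );
  //   commandBuffer.setPrimitiveRestartEnableEXT( VK_FALSE );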
8977 
8978   //=== VK_EXT_color_write_enable ===
8979 
8980 
8981   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8983   {
8984     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8985     d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
8986   }
8987 
8988 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
8989   template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
8991   {
8992     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
8993 
8994 
8995 
8996     d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
8997 
8998 
8999 
9000   }
9001 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
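
  // Usage sketch (illustrative comment, not generated code): enable color writes for two color attachments
  // through the ArrayProxy overload above, which accepts an initializer list directly; "commandBuffer" is an
  // assumed, valid command buffer handle.
  //
  //   commandBuffer.setColorWriteEnableEXT( { VK_TRUE, VK_TRUE } );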
9002 
9003 #if defined( VK_USE_PLATFORM_SCI )
9004   //=== VK_NV_external_sci_sync2 ===
9005 
9006 
9007   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphoreSciSyncPoolNV( const VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolCreateInfoNV * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV * pSemaphorePool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9009   {
9010     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9011     return static_cast<Result>( d.vkCreateSemaphoreSciSyncPoolNV( m_device, reinterpret_cast<const VkSemaphoreSciSyncPoolCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSemaphoreSciSyncPoolNV *>( pSemaphorePool ) ) );
9012   }
9013 
9014 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9015   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV>::type Device::createSemaphoreSciSyncPoolNV( const VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
9017   {
9018     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9019 
9020 
9021     VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV semaphorePool;
9022     VkResult result = d.vkCreateSemaphoreSciSyncPoolNV( m_device, reinterpret_cast<const VkSemaphoreSciSyncPoolCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphoreSciSyncPoolNV *>( &semaphorePool ) );
9023     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreSciSyncPoolNV" );
9024 
9025     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphorePool );
9026   }
9027 
9028 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
9029   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV, Dispatch>>::type Device::createSemaphoreSciSyncPoolNVUnique( const VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolCreateInfoNV & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
9031   {
9032     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9033 
9034 
9035     VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV semaphorePool;
9036     VkResult result = d.vkCreateSemaphoreSciSyncPoolNV( m_device, reinterpret_cast<const VkSemaphoreSciSyncPoolCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphoreSciSyncPoolNV *>( &semaphorePool ) );
9037     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreSciSyncPoolNVUnique" );
9038 
9039     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV, Dispatch>( semaphorePool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
9040   }
9041 #  endif /* VULKAN_HPP_NO_SMART_HANDLE */
9042 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9043 #endif /*VK_USE_PLATFORM_SCI*/
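
  // Usage sketch (illustrative comment, not generated code, VK_USE_PLATFORM_SCI builds only): create a
  // semaphore SciSync pool from a reconciled NvSciSyncObj; "device" and "nvSciSyncObj" are assumed to be valid,
  // and the create-info constructor is assumed to take the NvSciSyncObj as its only non-defaulted argument.
  //
  //   VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolCreateInfoNV createInfo( nvSciSyncObj );
  //   VULKAN_HPP_NAMESPACE::SemaphoreSciSyncPoolNV pool = device.createSemaphoreSciSyncPoolNV( createInfo );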
9044 
9045 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
9046   //=== VK_QNX_external_memory_screen_buffer ===
9047 
9048 
9049   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
9051   {
9052     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9053     return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) );
9054   }
9055 
9056 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9057   template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const
9059   {
9060     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9061 
9062 
9063     VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties;
9064     VkResult result = d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
9065     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
9066 
9067     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
9068   }
9069 
9070   template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const
9072   {
9073     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
9074 
9075 
9076     StructureChain<X, Y, Z...> structureChain;
9077     VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>();
9078     VkResult result = d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
9079     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
9080 
9081     return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
9082   }
9083 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
9084 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
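
  // Usage sketch (illustrative comment, not generated code, VK_USE_PLATFORM_SCREEN_QNX builds only): query the
  // memory properties of a QNX screen buffer before importing it; "device" and "screenBuffer" (a
  // struct _screen_buffer pointer) are assumed to be valid.
  //
  //   VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties = device.getScreenBufferPropertiesQNX( *screenBuffer );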
9085 
9086 
9087 }   // namespace VULKAN_HPP_NAMESPACE
9088 #endif