8 #ifndef VULKAN_FUNCS_HPP
9 #define VULKAN_FUNCS_HPP
// createInstance: wraps the C entry point vkCreateInstance through the per-call dispatcher `d`.
// NOTE(review): this chunk is an extraction-mangled fragment of Vulkan-Hpp's generated
// vulkan_funcs.hpp — the signature lines are missing here; comments describe the visible calls only.
19 template <
typename Dispatch>
// Pointer-style overload: casts the C++ wrapper pointers straight to the C Vulkan types
// and returns the VkResult converted to the C++ Result enum.
26 return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
27 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
28 reinterpret_cast<VkInstance *>( pInstance ) ) );
31 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
32 template <
typename Dispatch>
// Enhanced-mode overload: takes references / an Optional<AllocationCallbacks> and writes
// the created handle into a local `instance` (return path not visible in this fragment).
40 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
41 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
42 reinterpret_cast<VkInstance *>( &instance ) );
48 # ifndef VULKAN_HPP_NO_SMART_HANDLE
49 template <
typename Dispatch>
// Smart-handle (Unique*) overload: same call; presumably wraps `instance` in a UniqueHandle — TODO confirm,
// the wrapping code is outside this fragment.
57 d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
58 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
59 reinterpret_cast<VkInstance *>( &instance ) );
// Instance::destroy: forwards the member handle m_instance plus optional allocation
// callbacks to vkDestroyInstance. (Signature lines missing from this extraction.)
68 template <
typename Dispatch>
// Pointer-style overload — raw pAllocator passed through.
72 d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
75 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
76 template <
typename Dispatch>
// Enhanced-mode overload — Optional<AllocationCallbacks> unwrapped to a C pointer.
81 d.vkDestroyInstance( m_instance,
82 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Instance::enumeratePhysicalDevices. (Fragment — signatures and the surrounding
// do/while or result checks are not visible here.)
86 template <
typename Dispatch>
// Pointer-style overload: direct pass-through of count/array pointers.
92 return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
95 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
96 template <
typename PhysicalDeviceAllocator,
typename Dispatch>
// Enhanced-mode overload using the standard two-call enumeration pattern:
// first call with nullptr to query the count, then resize and fetch the handles.
102 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
103 uint32_t physicalDeviceCount;
107 result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount,
nullptr );
108 if ( ( result ==
VK_SUCCESS ) && physicalDeviceCount )
110 physicalDevices.resize( physicalDeviceCount );
111 result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
// Shrink in case the implementation returned fewer devices than first reported.
116 if ( physicalDeviceCount < physicalDevices.size() )
118 physicalDevices.resize( physicalDeviceCount );
120 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
123 template <
typename PhysicalDeviceAllocator,
// Allocator-taking overload: identical logic, but the vector is constructed with
// the caller-supplied allocator.
132 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
133 uint32_t physicalDeviceCount;
137 result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount,
nullptr );
138 if ( ( result ==
VK_SUCCESS ) && physicalDeviceCount )
140 physicalDevices.resize( physicalDeviceCount );
141 result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
146 if ( physicalDeviceCount < physicalDevices.size() )
148 physicalDevices.resize( physicalDeviceCount );
150 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
// PhysicalDevice query wrappers: getFeatures / getFormatProperties /
// getImageFormatProperties / getProperties. Each has a pointer-style overload and an
// enhanced-mode overload returning a value. (Signatures missing from this extraction.)
154 template <
typename Dispatch>
158 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
161 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
162 template <
typename Dispatch>
// Enhanced-mode getFeatures: fills a local `features` (return statement not visible here).
169 d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
175 template <
typename Dispatch>
// getFormatProperties, pointer-style.
181 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
185 template <
typename Dispatch>
// getFormatProperties, enhanced-mode: returns the filled structure by value.
192 d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
194 return formatProperties;
198 template <
typename Dispatch>
// getImageFormatProperties, pointer-style: each enum/flags argument is converted
// to its C counterpart before the dispatch call.
208 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
209 static_cast<VkFormat>(
format ),
210 static_cast<VkImageType>(
type ),
211 static_cast<VkImageTiling>( tiling ),
212 static_cast<VkImageUsageFlags>(
usage ),
213 static_cast<VkImageCreateFlags>(
flags ),
214 reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
217 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
218 template <
typename Dispatch>
225 Dispatch
const & d )
const
// Enhanced-mode getImageFormatProperties: captures the VkResult and converts
// it plus the output structure via createResultValueType.
230 VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
231 static_cast<VkFormat>( format ),
232 static_cast<VkImageType>( type ),
233 static_cast<VkImageTiling>( tiling ),
234 static_cast<VkImageUsageFlags>( usage ),
235 static_cast<VkImageCreateFlags>( flags ),
236 reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
239 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
243 template <
typename Dispatch>
// getProperties, pointer-style.
248 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
251 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
252 template <
typename Dispatch>
// getProperties, enhanced-mode: fills a local `properties` (return not visible here).
259 d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
// PhysicalDevice::getQueueFamilyProperties (three overloads) and
// PhysicalDevice::getMemoryProperties. (Signatures missing from this extraction.)
265 template <
typename Dispatch>
// Pointer-style overload.
271 d.vkGetPhysicalDeviceQueueFamilyProperties(
272 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
275 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
276 template <
typename QueueFamilyPropertiesAllocator,
typename Dispatch>
// Enhanced-mode: two-call enumeration (count query with nullptr, then fetch).
// This entry point returns void from the C API, so no VkResult is checked.
282 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
283 uint32_t queueFamilyPropertyCount;
284 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
285 queueFamilyProperties.resize( queueFamilyPropertyCount );
286 d.vkGetPhysicalDeviceQueueFamilyProperties(
287 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
// Shrink if the second call reported fewer entries than the first.
290 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
292 queueFamilyProperties.resize( queueFamilyPropertyCount );
294 return queueFamilyProperties;
297 template <
typename QueueFamilyPropertiesAllocator,
// Allocator-taking overload: same logic with a caller-supplied vector allocator.
306 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
307 uint32_t queueFamilyPropertyCount;
308 d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
309 queueFamilyProperties.resize( queueFamilyPropertyCount );
310 d.vkGetPhysicalDeviceQueueFamilyProperties(
311 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
314 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
316 queueFamilyProperties.resize( queueFamilyPropertyCount );
318 return queueFamilyProperties;
322 template <
typename Dispatch>
// getMemoryProperties, pointer-style.
327 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
330 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
331 template <
typename Dispatch>
// getMemoryProperties, enhanced-mode: returns the filled structure by value.
338 d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
340 return memoryProperties;
// Instance::getProcAddr and Device::getProcAddr: thin pass-throughs that return the
// raw PFN_vkVoidFunction from the loader. (Signatures and the enhanced-mode bodies
// are missing from this extraction.)
344 template <
typename Dispatch>
348 return d.vkGetInstanceProcAddr( m_instance, pName );
351 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
352 template <
typename Dispatch>
363 template <
typename Dispatch>
367 return d.vkGetDeviceProcAddr( m_device, pName );
370 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
371 template <
typename Dispatch>
// PhysicalDevice::createDevice (pointer, enhanced, unique overloads) and
// Device::destroy. (Signatures missing from this extraction.)
382 template <
typename Dispatch>
// Pointer-style createDevice: wrapper types cast to C types, VkResult returned.
389 return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
390 reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
391 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
392 reinterpret_cast<VkDevice *>( pDevice ) ) );
395 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
396 template <
typename Dispatch>
// Enhanced-mode createDevice: fills a local `device` handle.
404 d.vkCreateDevice( m_physicalDevice,
405 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
406 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
407 reinterpret_cast<VkDevice *>( &device ) );
413 # ifndef VULKAN_HPP_NO_SMART_HANDLE
414 template <
typename Dispatch>
418 Dispatch
const & d )
const
// Smart-handle createDeviceUnique: same call; presumably returns a UniqueDevice — TODO
// confirm, the wrapping code is outside this fragment.
424 d.vkCreateDevice( m_physicalDevice,
425 reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
426 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
427 reinterpret_cast<VkDevice *>( &device ) );
436 template <
typename Dispatch>
// Device::destroy, pointer-style.
440 d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
443 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
444 template <
typename Dispatch>
// Device::destroy, enhanced-mode with Optional allocator.
449 d.vkDestroyDevice( m_device,
450 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// enumerateInstanceExtensionProperties (free function) and
// PhysicalDevice::enumerateDeviceExtensionProperties, each with a pointer-style
// overload and two enhanced-mode overloads (default / caller allocator).
// (Signatures and the surrounding result checks are missing from this extraction.)
454 template <
typename Dispatch>
456 uint32_t * pPropertyCount,
461 return static_cast<Result>(
462 d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
465 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
466 template <
typename ExtensionPropertiesAllocator,
typename Dispatch>
// Enhanced-mode: `layerName` is an Optional string — its c_str() or nullptr selects
// between layer-specific and global extensions. Two-call enumeration pattern.
472 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
473 uint32_t propertyCount;
477 result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() :
nullptr, &propertyCount, nullptr );
478 if ( ( result ==
VK_SUCCESS ) && propertyCount )
480 properties.resize( propertyCount );
481 result = d.vkEnumerateInstanceExtensionProperties(
482 layerName ? layerName->c_str() :
nullptr, &propertyCount,
reinterpret_cast<VkExtensionProperties *
>( properties.data() ) );
487 if ( propertyCount < properties.size() )
489 properties.resize( propertyCount )
494 template <
typename ExtensionPropertiesAllocator,
// Allocator-taking overload: identical logic with a caller-supplied vector allocator.
500 ExtensionPropertiesAllocator & extensionPropertiesAllocator,
505 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
506 uint32_t propertyCount;
510 result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() :
nullptr, &propertyCount, nullptr );
511 if ( ( result ==
VK_SUCCESS ) && propertyCount )
513 properties.resize( propertyCount );
514 result = d.vkEnumerateInstanceExtensionProperties(
515 layerName ? layerName->c_str() :
nullptr, &propertyCount,
reinterpret_cast<VkExtensionProperties *
>( properties.data() ) );
520 if ( propertyCount < properties.size() )
522 properties.resize( propertyCount );
528 template <
typename Dispatch>
530 uint32_t * pPropertyCount,
// Device-level variant, pointer-style.
535 return static_cast<Result>(
536 d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
539 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
540 template <
typename ExtensionPropertiesAllocator,
typename Dispatch>
// Device-level enhanced-mode overload, same two-call pattern against m_physicalDevice.
546 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
547 uint32_t propertyCount;
551 result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() :
nullptr, &propertyCount, nullptr );
552 if ( ( result ==
VK_SUCCESS ) && propertyCount )
554 properties.resize( propertyCount );
555 result = d.vkEnumerateDeviceExtensionProperties(
556 m_physicalDevice, layerName ? layerName->c_str() :
nullptr, &propertyCount,
reinterpret_cast<VkExtensionProperties *
>( properties.data() ) );
561 if ( propertyCount < properties.size() )
563 properties.resize( propertyCount );
568 template <
typename ExtensionPropertiesAllocator,
// Device-level allocator-taking overload.
574 ExtensionPropertiesAllocator & extensionPropertiesAllocator,
575 Dispatch
const & d )
const
579 std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
580 uint32_t propertyCount;
584 result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() :
nullptr, &propertyCount, nullptr );
585 if ( ( result ==
VK_SUCCESS ) && propertyCount )
587 properties.resize( propertyCount );
588 result = d.vkEnumerateDeviceExtensionProperties(
589 m_physicalDevice, layerName ? layerName->c_str() :
nullptr, &propertyCount,
reinterpret_cast<VkExtensionProperties *
>( properties.data() ) );
594 if ( propertyCount < properties.size() )
596 properties.resize( propertyCount );
// enumerateInstanceLayerProperties (free function) and
// PhysicalDevice::enumerateDeviceLayerProperties, each with pointer-style and two
// enhanced-mode overloads. (Signatures missing from this extraction.)
602 template <
typename Dispatch>
608 return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
611 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
612 template <
typename LayerPropertiesAllocator,
typename Dispatch>
// Enhanced-mode: two-call enumeration (count with nullptr, then fetch, then shrink).
618 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
619 uint32_t propertyCount;
623 result = d.vkEnumerateInstanceLayerProperties( &propertyCount,
nullptr );
624 if ( ( result ==
VK_SUCCESS ) && propertyCount )
626 properties.resize( propertyCount );
627 result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
632 if ( propertyCount < properties.size() )
634 properties.resize( propertyCount );
639 template <
typename LayerPropertiesAllocator,
// Allocator-taking overload.
648 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
649 uint32_t propertyCount;
653 result = d.vkEnumerateInstanceLayerProperties( &propertyCount,
nullptr );
654 if ( ( result ==
VK_SUCCESS ) && propertyCount )
656 properties.resize( propertyCount );
657 result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
662 if ( propertyCount < properties.size() )
664 properties.resize( propertyCount );
670 template <
typename Dispatch>
// Device-level variant, pointer-style.
676 return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
679 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
680 template <
typename LayerPropertiesAllocator,
typename Dispatch>
// Device-level enhanced-mode overload.
686 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
687 uint32_t propertyCount;
691 result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount,
nullptr );
692 if ( ( result ==
VK_SUCCESS ) && propertyCount )
694 properties.resize( propertyCount );
695 result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
700 if ( propertyCount < properties.size() )
702 properties.resize( propertyCount );
707 template <
typename LayerPropertiesAllocator,
// Device-level allocator-taking overload.
716 std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
717 uint32_t propertyCount;
721 result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount,
nullptr );
722 if ( ( result ==
VK_SUCCESS ) && propertyCount )
724 properties.resize( propertyCount );
725 result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
730 if ( propertyCount < properties.size() )
732 properties.resize( propertyCount );
// Device::getQueue, Queue::submit, Queue::waitIdle, Device::waitIdle.
// (Signatures and most enhanced-mode bodies missing from this extraction.)
738 template <
typename Dispatch>
// getQueue, pointer-style: writes the queue handle through pQueue.
743 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
746 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
747 template <
typename Dispatch>
// getQueue, enhanced-mode: fills a local `queue` (return not visible here).
754 d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
760 template <
typename Dispatch>
// Queue::submit, pointer-style: submit array plus optional fence.
767 return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
770 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
771 template <
typename Dispatch>
// NOTE(review): the enhanced-mode submit body is missing from this fragment.
784 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
785 template <
typename Dispatch>
// Queue::waitIdle (non-enhanced build): raw result returned.
789 return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
792 template <
typename Dispatch>
804 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
805 template <
typename Dispatch>
// Device::waitIdle (non-enhanced build): raw result returned.
809 return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
812 template <
typename Dispatch>
// Device memory wrappers: allocateMemory (pointer / enhanced / unique), freeMemory and
// its `( free )` alias, mapMemory, unmapMemory. (Signatures missing from this extraction.)
824 template <
typename Dispatch>
// allocateMemory, pointer-style.
831 return static_cast<Result>( d.vkAllocateMemory( m_device,
832 reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
833 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
834 reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
837 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
838 template <
typename Dispatch>
842 Dispatch
const & d )
const
// allocateMemory, enhanced-mode: fills a local `memory` handle.
848 d.vkAllocateMemory( m_device,
849 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
850 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
851 reinterpret_cast<VkDeviceMemory *>( &memory ) );
857 # ifndef VULKAN_HPP_NO_SMART_HANDLE
858 template <
typename Dispatch>
862 Dispatch
const & d )
const
// allocateMemoryUnique: same call; presumably wraps `memory` in a UniqueHandle — TODO
// confirm, the wrapping code is outside this fragment.
868 d.vkAllocateMemory( m_device,
869 reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
870 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
871 reinterpret_cast<VkDeviceMemory *>( &memory ) );
880 template <
typename Dispatch>
// freeMemory, pointer-style.
886 d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>(
memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
889 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
890 template <
typename Dispatch>
// freeMemory, enhanced-mode with Optional allocator.
897 d.vkFreeMemory( m_device,
898 static_cast<VkDeviceMemory>(
memory ),
899 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
903 template <
typename Dispatch>
// Second freeMemory spelling (the `( free )` alias in vulkan.hpp) — identical body.
909 d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>(
memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
912 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
913 template <
typename Dispatch>
915 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
920 d.vkFreeMemory( m_device,
921 static_cast<VkDeviceMemory>(
memory ),
922 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
926 template <
typename Dispatch>
// mapMemory, pointer-style: offset/size/flags converted to C types (ppData argument
// not visible in this fragment).
935 return static_cast<Result>( d.vkMapMemory( m_device,
936 static_cast<VkDeviceMemory>(
memory ),
937 static_cast<VkDeviceSize>(
offset ),
938 static_cast<VkDeviceSize>(
size ),
939 static_cast<VkMemoryMapFlags>(
flags ),
943 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
944 template <
typename Dispatch>
949 Dispatch
const & d )
const
// mapMemory, enhanced-mode (result handling not visible in this fragment).
955 static_cast<VkDeviceMemory>( memory ),
956 static_cast<VkDeviceSize>( offset ),
957 static_cast<VkDeviceSize>( size ),
958 static_cast<VkMemoryMapFlags>( flags ),
966 template <
typename Dispatch>
// unmapMemory: void pass-through.
970 d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>(
memory ) );
// flushMappedMemoryRanges / invalidateMappedMemoryRanges / getDeviceMemoryCommitment.
// (Signatures and enhanced-mode bodies partially missing from this extraction.)
973 template <
typename Dispatch>
// flushMappedMemoryRanges, pointer-style.
979 return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
982 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
983 template <
typename Dispatch>
986 Dispatch
const & d )
const
997 template <
typename Dispatch>
// invalidateMappedMemoryRanges, pointer-style.
1003 return static_cast<Result>(
1004 d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
1007 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1008 template <
typename Dispatch>
1011 Dispatch
const & d )
const
1022 template <
typename Dispatch>
// getMemoryCommitment, pointer-style: writes the committed byte count through the out-pointer.
1028 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>(
memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
1031 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1032 template <
typename Dispatch>
// getMemoryCommitment, enhanced-mode: returns the value directly.
1039 d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>(
memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
1041 return committedMemoryInBytes;
// bindBufferMemory / bindImageMemory (non-enhanced + enhanced overloads) and
// getBufferMemoryRequirements / getImageMemoryRequirements.
// (Signatures missing from this extraction.)
1045 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1046 template <
typename Dispatch>
// bindBufferMemory (non-enhanced build): raw result returned.
1053 return static_cast<Result>(
1054 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>(
buffer ), static_cast<VkDeviceMemory>(
memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
1057 template <
typename Dispatch>
// bindBufferMemory, enhanced-mode (result check not visible in this fragment).
1064 d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
1071 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1072 template <
typename Dispatch>
// bindImageMemory (non-enhanced build).
1079 return static_cast<Result>(
1080 d.vkBindImageMemory( m_device, static_cast<VkImage>(
image ), static_cast<VkDeviceMemory>(
memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
1083 template <
typename Dispatch>
// bindImageMemory, enhanced-mode.
1090 d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
1097 template <
typename Dispatch>
// getBufferMemoryRequirements, pointer-style.
1103 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>(
buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
1106 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1107 template <
typename Dispatch>
// getBufferMemoryRequirements, enhanced-mode: returns the structure by value.
1114 d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>(
buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
1116 return memoryRequirements;
1120 template <
typename Dispatch>
// getImageMemoryRequirements, pointer-style.
1126 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>(
image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
1129 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1130 template <
typename Dispatch>
// getImageMemoryRequirements, enhanced-mode: returns the structure by value.
1137 d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>(
image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
1139 return memoryRequirements;
// Device::getImageSparseMemoryRequirements and
// PhysicalDevice::getSparseImageFormatProperties, each with pointer-style plus two
// enhanced-mode overloads using the two-call enumeration pattern.
// (Signatures and some argument lines missing from this extraction.)
1143 template <
typename Dispatch>
1145 uint32_t * pSparseMemoryRequirementCount,
1150 d.vkGetImageSparseMemoryRequirements( m_device,
1151 static_cast<VkImage>(
image ),
1152 pSparseMemoryRequirementCount,
1153 reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
1156 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1157 template <
typename SparseImageMemoryRequirementsAllocator,
typename Dispatch>
// Enhanced-mode: query count with nullptr, resize, fetch; this C entry point
// returns void, so only the count is rechecked afterwards.
1163 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
1164 uint32_t sparseMemoryRequirementCount;
1165 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount,
nullptr );
1166 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
1167 d.vkGetImageSparseMemoryRequirements( m_device,
1168 static_cast<VkImage>( image ),
1169 &sparseMemoryRequirementCount,
1170 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
1172 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
1173 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
1175 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
1177 return sparseMemoryRequirements;
1180 template <
typename SparseImageMemoryRequirementsAllocator,
// Allocator-taking overload: identical logic with caller-supplied allocator.
1186 SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
1187 Dispatch
const & d )
const
1191 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
1192 sparseImageMemoryRequirementsAllocator );
1193 uint32_t sparseMemoryRequirementCount;
1194 d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount,
nullptr );
1195 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
1196 d.vkGetImageSparseMemoryRequirements( m_device,
1197 static_cast<VkImage>( image ),
1198 &sparseMemoryRequirementCount,
1199 reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
1201 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
1202 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
1204 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
1206 return sparseMemoryRequirements;
1210 template <
typename Dispatch>
1216 uint32_t * pPropertyCount,
// getSparseImageFormatProperties, pointer-style: each enum/flags converted to C type.
1221 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1222 static_cast<VkFormat>(
format ),
1223 static_cast<VkImageType>(
type ),
1224 static_cast<VkSampleCountFlagBits>(
samples ),
1225 static_cast<VkImageUsageFlags>(
usage ),
1226 static_cast<VkImageTiling>( tiling ),
1228 reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
1231 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1232 template <
typename SparseImageFormatPropertiesAllocator,
typename Dispatch>
1239 Dispatch
const & d )
const
// Enhanced-mode: two-call pattern (the &propertyCount/nullptr arguments of the first
// call are not visible in this fragment).
1243 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
1244 uint32_t propertyCount;
1245 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1246 static_cast<VkFormat>( format ),
1247 static_cast<VkImageType>( type ),
1248 static_cast<VkSampleCountFlagBits>( samples ),
1249 static_cast<VkImageUsageFlags>( usage ),
1250 static_cast<VkImageTiling>( tiling ),
1253 properties.resize( propertyCount );
1254 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1255 static_cast<VkFormat>( format ),
1256 static_cast<VkImageType>( type ),
1257 static_cast<VkSampleCountFlagBits>( samples ),
1258 static_cast<VkImageUsageFlags>( usage ),
1259 static_cast<VkImageTiling>( tiling ),
1261 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
1264 if ( propertyCount < properties.size() )
1266 properties.resize( propertyCount );
1271 template <
typename SparseImageFormatPropertiesAllocator,
// Allocator-taking overload.
1281 SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
1282 Dispatch
const & d )
const
1286 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
1287 uint32_t propertyCount;
1288 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1289 static_cast<VkFormat>( format ),
1290 static_cast<VkImageType>( type ),
1291 static_cast<VkSampleCountFlagBits>( samples ),
1292 static_cast<VkImageUsageFlags>( usage ),
1293 static_cast<VkImageTiling>( tiling ),
1296 properties.resize( propertyCount );
1297 d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
1298 static_cast<VkFormat>( format ),
1299 static_cast<VkImageType>( type ),
1300 static_cast<VkSampleCountFlagBits>( samples ),
1301 static_cast<VkImageUsageFlags>( usage ),
1302 static_cast<VkImageTiling>( tiling ),
1304 reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
1307 if ( propertyCount < properties.size() )
1309 properties.resize( propertyCount );
// Queue::bindSparse: submits sparse binding operations with an optional fence.
// (Signatures missing from this extraction.)
1315 template <
typename Dispatch>
// Pointer-style overload.
1322 return static_cast<Result>(
1323 d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
1326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1327 template <
typename Dispatch>
// Enhanced-mode overload: `bindInfo` is an ArrayProxy-like range — size()/data()
// forwarded to the C call.
1334 d.vkQueueBindSparse( m_queue, bindInfo.
size(),
reinterpret_cast<const VkBindSparseInfo *
>( bindInfo.
data() ), static_cast<VkFence>( fence ) );
// Fence wrappers: createFence (pointer / enhanced / unique), destroyFence and its
// `( destroy )` alias, resetFences, getFenceStatus, waitForFences.
// (Signatures missing from this extraction.)
1341 template <
typename Dispatch>
// createFence, pointer-style.
1348 return static_cast<Result>( d.vkCreateFence( m_device,
1349 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
1350 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1351 reinterpret_cast<VkFence *>( pFence ) ) );
1354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1355 template <
typename Dispatch>
// createFence, enhanced-mode: fills a local `fence` handle.
1363 d.vkCreateFence( m_device,
1364 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
1365 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1366 reinterpret_cast<VkFence *>( &fence ) );
1372 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1373 template <
typename Dispatch>
// createFenceUnique: same call; presumably wraps `fence` in a UniqueHandle — TODO
// confirm, the wrapping code is outside this fragment.
1381 d.vkCreateFence( m_device,
1382 reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
1383 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1384 reinterpret_cast<VkFence *>( &fence ) );
1393 template <
typename Dispatch>
// destroyFence, pointer-style.
1399 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1402 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1403 template <
typename Dispatch>
// destroyFence, enhanced-mode with Optional allocator.
1410 d.vkDestroyFence( m_device,
1411 static_cast<VkFence>( fence ),
1412 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1416 template <
typename Dispatch>
// Second destroyFence spelling (the `( destroy )` alias) — identical body.
1422 d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1425 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1426 template <
typename Dispatch>
1433 d.vkDestroyFence( m_device,
1434 static_cast<VkFence>( fence ),
1435 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1439 template <
typename Dispatch>
// resetFences, pointer-style.
1445 return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
1448 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1449 template <
typename Dispatch>
// resetFences, enhanced-mode: `fences` is an ArrayProxy-like range.
1455 VkResult result = d.vkResetFences( m_device, fences.
size(),
reinterpret_cast<const VkFence *
>( fences.
data() ) );
1462 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1463 template <
typename Dispatch>
// getFenceStatus (non-enhanced build): raw result returned.
1467 return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
1470 template <
typename Dispatch>
// getFenceStatus, enhanced-mode: result captured and checked via resultCheck.
1475 VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
1476 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1484 template <
typename Dispatch>
// waitForFences, pointer-style: waitAll converted to VkBool32, timeout in nanoseconds
// per the C API (units not restated here — see the Vulkan spec).
1492 return static_cast<Result>(
1493 d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ),
timeout ) );
1496 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1497 template <
typename Dispatch>
1502 Dispatch
const & d )
const
// waitForFences, enhanced-mode over an ArrayProxy-like range, then resultCheck.
1507 d.vkWaitForFences( m_device, fences.
size(),
reinterpret_cast<const VkFence *
>( fences.
data() ), static_cast<VkBool32>( waitAll ),
timeout );
1508 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Semaphore wrappers: createSemaphore (pointer / enhanced / unique), destroySemaphore
// and its `( destroy )` alias. (Signatures missing from this extraction.)
1516 template <
typename Dispatch>
// createSemaphore, pointer-style.
1523 return static_cast<Result>( d.vkCreateSemaphore( m_device,
1524 reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
1525 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1526 reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
1529 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1530 template <
typename Dispatch>
1534 Dispatch
const & d )
const
// createSemaphore, enhanced-mode: fills a local `semaphore` handle.
1540 d.vkCreateSemaphore( m_device,
1541 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
1542 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1543 reinterpret_cast<VkSemaphore *>( &semaphore ) );
1549 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1550 template <
typename Dispatch>
1554 Dispatch
const & d )
const
// createSemaphoreUnique: same call; presumably wraps `semaphore` in a UniqueHandle —
// TODO confirm, the wrapping code is outside this fragment.
1560 d.vkCreateSemaphore( m_device,
1561 reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
1562 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1563 reinterpret_cast<VkSemaphore *>( &semaphore ) );
1567 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1573 template <
typename Dispatch>
// destroySemaphore, pointer-style.
1579 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1582 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1583 template <
typename Dispatch>
// destroySemaphore, enhanced-mode with Optional allocator.
1590 d.vkDestroySemaphore( m_device,
1591 static_cast<VkSemaphore>( semaphore ),
1592 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1596 template <
typename Dispatch>
// Second destroySemaphore spelling (the `( destroy )` alias) — identical body.
1602 d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1605 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1606 template <
typename Dispatch>
1613 d.vkDestroySemaphore( m_device,
1614 static_cast<VkSemaphore>( semaphore ),
1615 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1619 template <
typename Dispatch>
1626 return static_cast<Result>( d.vkCreateEvent( m_device,
1627 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
1628 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1629 reinterpret_cast<VkEvent *>( pEvent ) ) );
1632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1633 template <
typename Dispatch>
1641 d.vkCreateEvent( m_device,
1642 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
1643 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1644 reinterpret_cast<VkEvent *>( &event ) );
1650 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1651 template <
typename Dispatch>
1659 d.vkCreateEvent( m_device,
1660 reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
1661 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1662 reinterpret_cast<VkEvent *>( &event ) );
1671 template <
typename Dispatch>
1677 d.vkDestroyEvent( m_device, static_cast<VkEvent>(
event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1681 template <
typename Dispatch>
1688 d.vkDestroyEvent( m_device,
1689 static_cast<VkEvent>(
event ),
1690 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1694 template <
typename Dispatch>
1700 d.vkDestroyEvent( m_device, static_cast<VkEvent>(
event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1703 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1704 template <
typename Dispatch>
1711 d.vkDestroyEvent( m_device,
1712 static_cast<VkEvent>(
event ),
1713 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1717 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1718 template <
typename Dispatch>
1722 return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>(
event ) ) );
1725 template <
typename Dispatch>
1730 VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
1731 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1739 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1740 template <
typename Dispatch>
1744 return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>(
event ) ) );
1747 template <
typename Dispatch>
1749 Dispatch
const & d )
const
1753 VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
1760 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
1761 template <
typename Dispatch>
1765 return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>(
event ) ) );
1768 template <
typename Dispatch>
1773 VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
1780 template <
typename Dispatch>
1787 return static_cast<Result>( d.vkCreateQueryPool( m_device,
1788 reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
1789 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1790 reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
1793 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1794 template <
typename Dispatch>
1798 Dispatch
const & d )
const
1804 d.vkCreateQueryPool( m_device,
1805 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
1806 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1807 reinterpret_cast<VkQueryPool *>( &queryPool ) );
1813 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1814 template <
typename Dispatch>
1818 Dispatch
const & d )
const
1824 d.vkCreateQueryPool( m_device,
1825 reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
1826 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1827 reinterpret_cast<VkQueryPool *>( &queryPool ) );
1831 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1837 template <
typename Dispatch>
1843 d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1846 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1847 template <
typename Dispatch>
1854 d.vkDestroyQueryPool( m_device,
1855 static_cast<VkQueryPool>( queryPool ),
1856 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1860 template <
typename Dispatch>
1866 d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
1869 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1870 template <
typename Dispatch>
1877 d.vkDestroyQueryPool( m_device,
1878 static_cast<VkQueryPool>( queryPool ),
1879 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
1883 template <
typename Dispatch>
1885 uint32_t firstQuery,
1886 uint32_t queryCount,
1894 return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
1895 static_cast<VkQueryPool>( queryPool ),
1900 static_cast<VkDeviceSize>(
stride ),
1901 static_cast<VkQueryResultFlags>(
flags ) ) );
1904 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1905 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
1908 uint32_t firstQuery,
1909 uint32_t queryCount,
1913 Dispatch
const & d )
const
1918 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
1920 static_cast<VkQueryPool>( queryPool ),
1924 reinterpret_cast<void *>( data.data() ),
1925 static_cast<VkDeviceSize>( stride ),
1927 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1934 template <
typename DataType,
typename Dispatch>
1936 uint32_t firstQuery,
1937 uint32_t queryCount,
1940 Dispatch
const & d )
const
1946 static_cast<VkQueryPool>( queryPool ),
1950 reinterpret_cast<void *>( &data ),
1951 static_cast<VkDeviceSize>( stride ),
1952 static_cast<VkQueryResultFlags>( flags ) );
1953 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
1961 template <
typename Dispatch>
1968 return static_cast<Result>( d.vkCreateBuffer( m_device,
1969 reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
1970 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
1971 reinterpret_cast<VkBuffer *>( pBuffer ) ) );
1974 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
1975 template <
typename Dispatch>
1983 d.vkCreateBuffer( m_device,
1984 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
1985 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
1986 reinterpret_cast<VkBuffer *>( &buffer ) );
1992 # ifndef VULKAN_HPP_NO_SMART_HANDLE
1993 template <
typename Dispatch>
2001 d.vkCreateBuffer( m_device,
2002 reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
2003 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2004 reinterpret_cast<VkBuffer *>( &buffer ) );
2013 template <
typename Dispatch>
2019 d.vkDestroyBuffer( m_device, static_cast<VkBuffer>(
buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2022 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2023 template <
typename Dispatch>
2030 d.vkDestroyBuffer( m_device,
2031 static_cast<VkBuffer>(
buffer ),
2032 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2036 template <
typename Dispatch>
2042 d.vkDestroyBuffer( m_device, static_cast<VkBuffer>(
buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2046 template <
typename Dispatch>
2053 d.vkDestroyBuffer( m_device,
2054 static_cast<VkBuffer>(
buffer ),
2055 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2059 template <
typename Dispatch>
2066 return static_cast<Result>( d.vkCreateBufferView( m_device,
2067 reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
2068 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2069 reinterpret_cast<VkBufferView *>( pView ) ) );
2072 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2073 template <
typename Dispatch>
2077 Dispatch
const & d )
const
2083 d.vkCreateBufferView( m_device,
2084 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
2085 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2086 reinterpret_cast<VkBufferView *>( &view ) );
2092 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2093 template <
typename Dispatch>
2097 Dispatch
const & d )
const
2103 d.vkCreateBufferView( m_device,
2104 reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
2105 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2106 reinterpret_cast<VkBufferView *>( &view ) );
2115 template <
typename Dispatch>
2121 d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2125 template <
typename Dispatch>
2132 d.vkDestroyBufferView( m_device,
2133 static_cast<VkBufferView>( bufferView ),
2134 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2138 template <
typename Dispatch>
2144 d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2147 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2148 template <
typename Dispatch>
2155 d.vkDestroyBufferView( m_device,
2156 static_cast<VkBufferView>( bufferView ),
2157 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2161 template <
typename Dispatch>
2168 return static_cast<Result>( d.vkCreateImage( m_device,
2169 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
2170 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2171 reinterpret_cast<VkImage *>( pImage ) ) );
2174 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2175 template <
typename Dispatch>
2183 d.vkCreateImage( m_device,
2184 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
2185 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2186 reinterpret_cast<VkImage *>( &image ) );
2192 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2193 template <
typename Dispatch>
2201 d.vkCreateImage( m_device,
2202 reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
2203 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2204 reinterpret_cast<VkImage *>( &image ) );
2213 template <
typename Dispatch>
2219 d.vkDestroyImage( m_device, static_cast<VkImage>(
image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2222 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2223 template <
typename Dispatch>
2230 d.vkDestroyImage( m_device,
2231 static_cast<VkImage>(
image ),
2232 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2236 template <
typename Dispatch>
2242 d.vkDestroyImage( m_device, static_cast<VkImage>(
image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2245 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2246 template <
typename Dispatch>
2253 d.vkDestroyImage( m_device,
2254 static_cast<VkImage>(
image ),
2255 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2259 template <
typename Dispatch>
2266 d.vkGetImageSubresourceLayout( m_device,
2267 static_cast<VkImage>(
image ),
2268 reinterpret_cast<const VkImageSubresource *>( pSubresource ),
2269 reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
2272 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2273 template <
typename Dispatch>
2280 d.vkGetImageSubresourceLayout( m_device,
2281 static_cast<VkImage>(
image ),
2282 reinterpret_cast<const VkImageSubresource *>( &subresource ),
2283 reinterpret_cast<VkSubresourceLayout *>( &layout ) );
2289 template <
typename Dispatch>
2296 return static_cast<Result>( d.vkCreateImageView( m_device,
2297 reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
2298 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2299 reinterpret_cast<VkImageView *>( pView ) ) );
2302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2303 template <
typename Dispatch>
2307 Dispatch
const & d )
const
2313 d.vkCreateImageView( m_device,
2314 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
2315 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2316 reinterpret_cast<VkImageView *>( &view ) );
2322 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2323 template <
typename Dispatch>
2327 Dispatch
const & d )
const
2333 d.vkCreateImageView( m_device,
2334 reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
2335 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2336 reinterpret_cast<VkImageView *>( &view ) );
2345 template <
typename Dispatch>
2351 d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2354 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2355 template <
typename Dispatch>
2362 d.vkDestroyImageView( m_device,
2363 static_cast<VkImageView>( imageView ),
2364 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2368 template <
typename Dispatch>
2374 d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2377 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2378 template <
typename Dispatch>
2385 d.vkDestroyImageView( m_device,
2386 static_cast<VkImageView>( imageView ),
2387 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2391 template <
typename Dispatch>
2398 return static_cast<Result>( d.vkCreateShaderModule( m_device,
2399 reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
2400 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2401 reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
2404 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2405 template <
typename Dispatch>
2409 Dispatch
const & d )
const
2415 d.vkCreateShaderModule( m_device,
2416 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
2417 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2418 reinterpret_cast<VkShaderModule *>( &shaderModule ) );
2424 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2425 template <
typename Dispatch>
2429 Dispatch
const & d )
const
2435 d.vkCreateShaderModule( m_device,
2436 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
2437 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2438 reinterpret_cast<VkShaderModule *>( &shaderModule ) );
2442 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2448 template <
typename Dispatch>
2454 d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2457 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2458 template <
typename Dispatch>
2465 d.vkDestroyShaderModule( m_device,
2466 static_cast<VkShaderModule>( shaderModule ),
2467 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2471 template <
typename Dispatch>
2477 d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2480 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2481 template <
typename Dispatch>
2488 d.vkDestroyShaderModule( m_device,
2489 static_cast<VkShaderModule>( shaderModule ),
2490 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2494 template <
typename Dispatch>
2501 return static_cast<Result>( d.vkCreatePipelineCache( m_device,
2502 reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
2503 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2504 reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
2507 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2508 template <
typename Dispatch>
2512 Dispatch
const & d )
const
2518 d.vkCreatePipelineCache( m_device,
2519 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
2520 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2521 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
2527 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2528 template <
typename Dispatch>
2532 Dispatch
const & d )
const
2538 d.vkCreatePipelineCache( m_device,
2539 reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
2540 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2541 reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
2545 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2551 template <
typename Dispatch>
2557 d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2560 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2561 template <
typename Dispatch>
2568 d.vkDestroyPipelineCache( m_device,
2569 static_cast<VkPipelineCache>( pipelineCache ),
2570 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2574 template <
typename Dispatch>
2580 d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
2583 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2584 template <
typename Dispatch>
2591 d.vkDestroyPipelineCache( m_device,
2592 static_cast<VkPipelineCache>( pipelineCache ),
2593 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
2597 template <
typename Dispatch>
2604 return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
2607 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2608 template <
typename U
int8_tAllocator,
typename Dispatch>
2614 std::vector<uint8_t, Uint8_tAllocator>
data;
2619 result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize,
nullptr );
2622 data.resize( dataSize );
2623 result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
2628 if ( dataSize < data.size() )
2630 data.resize( dataSize );
2635 template <
typename Uint8_tAllocator,
2644 std::vector<uint8_t, Uint8_tAllocator>
data( uint8_tAllocator );
2649 result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize,
nullptr );
2652 data.resize( dataSize );
2653 result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
2658 if ( dataSize < data.size() )
2660 data.resize( dataSize );
2666 template <
typename Dispatch>
2668 uint32_t srcCacheCount,
2673 return static_cast<Result>(
2674 d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
2677 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2678 template <
typename Dispatch>
2682 Dispatch
const & d )
const
2687 m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.
size(),
reinterpret_cast<const VkPipelineCache *
>( srcCaches.
data() ) );
2694 template <
typename Dispatch>
2696 uint32_t createInfoCount,
2703 return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
2704 static_cast<VkPipelineCache>( pipelineCache ),
2706 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
2707 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2708 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
2711 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2712 template <
typename PipelineAllocator,
typename Dispatch>
2717 Dispatch
const & d )
const
2721 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size() );
2724 static_cast<VkPipelineCache>( pipelineCache ),
2727 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2728 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2729 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2736 template <
typename PipelineAllocator,
2744 PipelineAllocator & pipelineAllocator,
2745 Dispatch
const & d )
const
2749 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size(), pipelineAllocator );
2752 static_cast<VkPipelineCache>( pipelineCache ),
2755 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2756 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2757 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2764 template <
typename Dispatch>
2769 Dispatch
const & d )
const
2776 static_cast<VkPipelineCache>( pipelineCache ),
2778 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
2779 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2780 reinterpret_cast<VkPipeline *>( &pipeline ) );
2781 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2788 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2789 template <
typename Dispatch,
typename PipelineAllocator>
2794 Dispatch
const & d )
const
2798 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
2801 static_cast<VkPipelineCache>( pipelineCache ),
2804 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2805 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2806 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2809 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
2810 uniquePipelines.reserve( createInfos.
size() );
2812 for (
auto const & pipeline :
pipelines )
2820 template <
typename Dispatch,
2821 typename PipelineAllocator,
2823 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>
::value,
int>
::type>
2828 PipelineAllocator & pipelineAllocator,
2829 Dispatch
const & d )
const
2833 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
2836 static_cast<VkPipelineCache>( pipelineCache ),
2839 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2840 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2841 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2844 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
2845 uniquePipelines.reserve( createInfos.
size() );
2847 for (
auto const & pipeline :
pipelines )
2855 template <
typename Dispatch>
2860 Dispatch
const & d )
const
2867 static_cast<VkPipelineCache>( pipelineCache ),
2869 reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
2870 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2871 reinterpret_cast<VkPipeline *>( &pipeline ) );
2872 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2883 template <
typename Dispatch>
2885 uint32_t createInfoCount,
2892 return static_cast<Result>( d.vkCreateComputePipelines( m_device,
2893 static_cast<VkPipelineCache>( pipelineCache ),
2895 reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
2896 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
2897 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
2900 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
2901 template <
typename PipelineAllocator,
typename Dispatch>
2906 Dispatch
const & d )
const
2910 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size() );
2913 static_cast<VkPipelineCache>( pipelineCache ),
2916 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2917 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2918 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2925 template <
typename PipelineAllocator,
2933 PipelineAllocator & pipelineAllocator,
2934 Dispatch
const & d )
const
2938 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size(), pipelineAllocator );
2941 static_cast<VkPipelineCache>( pipelineCache ),
2944 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2945 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2946 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2953 template <
typename Dispatch>
2958 Dispatch
const & d )
const
2965 static_cast<VkPipelineCache>( pipelineCache ),
2967 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
2968 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2969 reinterpret_cast<VkPipeline *>( &pipeline ) );
2970 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2977 # ifndef VULKAN_HPP_NO_SMART_HANDLE
2978 template <
typename Dispatch,
typename PipelineAllocator>
2983 Dispatch
const & d )
const
2987 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
2990 static_cast<VkPipelineCache>( pipelineCache ),
2993 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
2994 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
2995 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
2998 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
2999 uniquePipelines.reserve( createInfos.
size() );
3001 for (
auto const & pipeline :
pipelines )
3009 template <
typename Dispatch,
3010 typename PipelineAllocator,
3012 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>
::value,
int>
::type>
3017 PipelineAllocator & pipelineAllocator,
3018 Dispatch
const & d )
const
3022 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
3025 static_cast<VkPipelineCache>( pipelineCache ),
3028 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3029 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
3030 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3033 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
3034 uniquePipelines.reserve( createInfos.
size() );
3036 for (
auto const & pipeline :
pipelines )
3044 template <
typename Dispatch>
3049 Dispatch
const & d )
const
3056 static_cast<VkPipelineCache>( pipelineCache ),
3058 reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
3059 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3060 reinterpret_cast<VkPipeline *>( &pipeline ) );
3061 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3072 template <
typename Dispatch>
3078 d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3081 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3082 template <
typename Dispatch>
3089 d.vkDestroyPipeline( m_device,
3090 static_cast<VkPipeline>( pipeline ),
3091 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3095 template <
typename Dispatch>
3101 d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3104 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3105 template <
typename Dispatch>
3112 d.vkDestroyPipeline( m_device,
3113 static_cast<VkPipeline>( pipeline ),
3114 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3118 template <
typename Dispatch>
3125 return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
3126 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
3127 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3128 reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
3131 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3132 template <
typename Dispatch>
3136 Dispatch
const & d )
const
3142 d.vkCreatePipelineLayout( m_device,
3143 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
3144 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3145 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
3148 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
3151 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3152 template <
typename Dispatch>
3156 Dispatch
const & d )
const
3162 d.vkCreatePipelineLayout( m_device,
3163 reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
3164 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3165 reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
3169 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3175 template <
typename Dispatch>
3181 d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3184 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3185 template <
typename Dispatch>
3192 d.vkDestroyPipelineLayout( m_device,
3193 static_cast<VkPipelineLayout>( pipelineLayout ),
3194 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3198 template <
typename Dispatch>
3204 d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3207 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3208 template <
typename Dispatch>
3215 d.vkDestroyPipelineLayout( m_device,
3216 static_cast<VkPipelineLayout>( pipelineLayout ),
3217 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3221 template <
typename Dispatch>
3228 return static_cast<Result>( d.vkCreateSampler( m_device,
3229 reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
3230 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3231 reinterpret_cast<VkSampler *>( pSampler ) ) );
3234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3235 template <
typename Dispatch>
3243 d.vkCreateSampler( m_device,
3244 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
3245 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3246 reinterpret_cast<VkSampler *>( &sampler ) );
3252 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3253 template <
typename Dispatch>
3261 d.vkCreateSampler( m_device,
3262 reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
3263 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3264 reinterpret_cast<VkSampler *>( &sampler ) );
3273 template <
typename Dispatch>
3279 d.vkDestroySampler( m_device, static_cast<VkSampler>(
sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3282 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3283 template <
typename Dispatch>
3290 d.vkDestroySampler( m_device,
3291 static_cast<VkSampler>(
sampler ),
3292 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3296 template <
typename Dispatch>
3302 d.vkDestroySampler( m_device, static_cast<VkSampler>(
sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3305 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3306 template <
typename Dispatch>
3313 d.vkDestroySampler( m_device,
3314 static_cast<VkSampler>(
sampler ),
3315 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3319 template <
typename Dispatch>
3326 return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
3327 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
3328 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3329 reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
3332 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3333 template <
typename Dispatch>
3337 Dispatch
const & d )
const
3344 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
3345 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3346 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
3352 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3353 template <
typename Dispatch>
3357 Dispatch
const & d )
const
3364 reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
3365 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3366 reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
3370 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3376 template <
typename Dispatch>
3382 d.vkDestroyDescriptorSetLayout(
3383 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3387 template <
typename Dispatch>
3394 d.vkDestroyDescriptorSetLayout(
3396 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
3397 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3401 template <
typename Dispatch>
3407 d.vkDestroyDescriptorSetLayout(
3408 m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3411 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3412 template <
typename Dispatch>
3419 d.vkDestroyDescriptorSetLayout(
3421 static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
3422 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3426 template <
typename Dispatch>
3433 return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
3434 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
3435 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3436 reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
3439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3440 template <
typename Dispatch>
3444 Dispatch
const & d )
const
3450 d.vkCreateDescriptorPool( m_device,
3451 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
3452 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3453 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
3456 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
3459 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3460 template <
typename Dispatch>
3464 Dispatch
const & d )
const
3470 d.vkCreateDescriptorPool( m_device,
3471 reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
3472 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3473 reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
3477 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3483 template <
typename Dispatch>
3489 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3492 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3493 template <
typename Dispatch>
3500 d.vkDestroyDescriptorPool( m_device,
3501 static_cast<VkDescriptorPool>( descriptorPool ),
3502 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3506 template <
typename Dispatch>
3512 d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3515 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3516 template <
typename Dispatch>
3523 d.vkDestroyDescriptorPool( m_device,
3524 static_cast<VkDescriptorPool>( descriptorPool ),
3525 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3529 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
3530 template <
typename Dispatch>
3536 return static_cast<Result>(
3537 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>(
flags ) ) );
3540 template <
typename Dispatch>
3547 d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>(
flags ) );
3551 template <
typename Dispatch>
3557 return static_cast<Result>( d.vkAllocateDescriptorSets(
3558 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
3561 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3562 template <
typename DescriptorSetAllocator,
typename Dispatch>
3568 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.
descriptorSetCount );
3570 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3573 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
3576 template <
typename DescriptorSetAllocator,
3582 DescriptorSetAllocator & descriptorSetAllocator,
3583 Dispatch
const & d )
const
3587 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.
descriptorSetCount, descriptorSetAllocator );
3589 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3592 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
3595 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3596 template <
typename Dispatch,
typename DescriptorSetAllocator>
3603 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.
descriptorSetCount );
3605 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3607 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
3610 for (
auto const & descriptorSet : descriptorSets )
3614 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
3617 template <
typename Dispatch,
3618 typename DescriptorSetAllocator,
3620 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>
::value,
int>
::type>
3624 DescriptorSetAllocator & descriptorSetAllocator,
3625 Dispatch
const & d )
const
3629 std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.
descriptorSetCount );
3631 m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
3633 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
3636 for (
auto const & descriptorSet : descriptorSets )
3640 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
3645 template <
typename Dispatch>
3647 uint32_t descriptorSetCount,
3652 return static_cast<Result>( d.vkFreeDescriptorSets(
3653 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
3656 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3657 template <
typename Dispatch>
3664 d.vkFreeDescriptorSets(
3665 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(),
reinterpret_cast<const VkDescriptorSet *
>( descriptorSets.data() ) );
3669 template <
typename Dispatch>
3671 uint32_t descriptorSetCount,
3676 return static_cast<Result>( d.vkFreeDescriptorSets(
3677 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
3680 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3681 template <
typename Dispatch>
3688 d.vkFreeDescriptorSets(
3689 m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(),
reinterpret_cast<const VkDescriptorSet *
>( descriptorSets.data() ) );
3693 template <
typename Dispatch>
3696 uint32_t descriptorCopyCount,
3701 d.vkUpdateDescriptorSets( m_device,
3702 descriptorWriteCount,
3703 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
3704 descriptorCopyCount,
3705 reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
3708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3709 template <
typename Dispatch>
3717 d.vkUpdateDescriptorSets( m_device,
3718 descriptorWrites.size(),
3720 descriptorCopies.size(),
3725 template <
typename Dispatch>
3732 return static_cast<Result>( d.vkCreateFramebuffer( m_device,
3733 reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
3734 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3735 reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
3738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3739 template <
typename Dispatch>
3743 Dispatch
const & d )
const
3749 d.vkCreateFramebuffer( m_device,
3750 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
3751 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3752 reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
3758 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3759 template <
typename Dispatch>
3763 Dispatch
const & d )
const
3769 d.vkCreateFramebuffer( m_device,
3770 reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
3771 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3772 reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
3776 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3782 template <
typename Dispatch>
3788 d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>(
framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3791 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3792 template <
typename Dispatch>
3799 d.vkDestroyFramebuffer( m_device,
3801 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3805 template <
typename Dispatch>
3811 d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>(
framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3814 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3815 template <
typename Dispatch>
3822 d.vkDestroyFramebuffer( m_device,
3824 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3828 template <
typename Dispatch>
3835 return static_cast<Result>( d.vkCreateRenderPass( m_device,
3836 reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
3837 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3838 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
3841 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3842 template <
typename Dispatch>
3846 Dispatch
const & d )
const
3852 d.vkCreateRenderPass( m_device,
3853 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
3854 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3855 reinterpret_cast<VkRenderPass *>( &renderPass ) );
3861 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3862 template <
typename Dispatch>
3866 Dispatch
const & d )
const
3872 d.vkCreateRenderPass( m_device,
3873 reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
3874 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3875 reinterpret_cast<VkRenderPass *>( &renderPass ) );
3879 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
3885 template <
typename Dispatch>
3891 d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3894 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3895 template <
typename Dispatch>
3902 d.vkDestroyRenderPass( m_device,
3903 static_cast<VkRenderPass>( renderPass ),
3904 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3908 template <
typename Dispatch>
3914 d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
3917 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3918 template <
typename Dispatch>
3925 d.vkDestroyRenderPass( m_device,
3926 static_cast<VkRenderPass>( renderPass ),
3927 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
3931 template <
typename Dispatch>
3937 d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
3940 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3941 template <
typename Dispatch>
3948 d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
3954 template <
typename Dispatch>
3961 return static_cast<Result>( d.vkCreateCommandPool( m_device,
3962 reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
3963 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
3964 reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
3967 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
3968 template <
typename Dispatch>
3972 Dispatch
const & d )
const
3978 d.vkCreateCommandPool( m_device,
3979 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
3980 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
3981 reinterpret_cast<VkCommandPool *>( &commandPool ) );
3987 # ifndef VULKAN_HPP_NO_SMART_HANDLE
3988 template <
typename Dispatch>
3992 Dispatch
const & d )
const
3998 d.vkCreateCommandPool( m_device,
3999 reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
4000 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
4001 reinterpret_cast<VkCommandPool *>( &commandPool ) );
4005 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
4011 template <
typename Dispatch>
4017 d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4020 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4021 template <
typename Dispatch>
4028 d.vkDestroyCommandPool( m_device,
4029 static_cast<VkCommandPool>( commandPool ),
4030 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4034 template <
typename Dispatch>
4040 d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
4043 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4044 template <
typename Dispatch>
4051 d.vkDestroyCommandPool( m_device,
4052 static_cast<VkCommandPool>( commandPool ),
4053 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
4057 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4058 template <
typename Dispatch>
4064 return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>(
flags ) ) );
4067 template <
typename Dispatch>
4073 VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
4080 template <
typename Dispatch>
4086 return static_cast<Result>( d.vkAllocateCommandBuffers(
4087 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
4090 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4091 template <
typename CommandBufferAllocator,
typename Dispatch>
4097 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.
commandBufferCount );
4099 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4102 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
4105 template <
typename CommandBufferAllocator,
4111 CommandBufferAllocator & commandBufferAllocator,
4112 Dispatch
const & d )
const
4116 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.
commandBufferCount, commandBufferAllocator );
4118 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4121 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
4124 # ifndef VULKAN_HPP_NO_SMART_HANDLE
4125 template <
typename Dispatch,
typename CommandBufferAllocator>
4132 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.
commandBufferCount );
4134 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4136 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
4139 for (
auto const & commandBuffer : commandBuffers )
4143 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
4146 template <
typename Dispatch,
4147 typename CommandBufferAllocator,
4149 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>
::value,
int>
::type>
4153 CommandBufferAllocator & commandBufferAllocator,
4154 Dispatch
const & d )
const
4158 std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.
commandBufferCount );
4160 m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
4162 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
4165 for (
auto const & commandBuffer : commandBuffers )
4169 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
4174 template <
typename Dispatch>
4176 uint32_t commandBufferCount,
4181 d.vkFreeCommandBuffers(
4182 m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
4185 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4186 template <
typename Dispatch>
4193 d.vkFreeCommandBuffers(
4194 m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(),
reinterpret_cast<const VkCommandBuffer *
>( commandBuffers.data() ) );
4198 template <
typename Dispatch>
4200 uint32_t commandBufferCount,
4205 d.vkFreeCommandBuffers(
4206 m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
4209 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4210 template <
typename Dispatch>
4217 d.vkFreeCommandBuffers(
4218 m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(),
reinterpret_cast<const VkCommandBuffer *
>( commandBuffers.data() ) );
4222 template <
typename Dispatch>
4227 return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
4230 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4231 template <
typename Dispatch>
4237 VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
4244 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4245 template <
typename Dispatch>
4249 return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
4252 template <
typename Dispatch>
4264 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
4265 template <
typename Dispatch>
4270 return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>(
flags ) ) );
4273 template <
typename Dispatch>
4278 VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
4285 template <
typename Dispatch>
4291 d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
4294 template <
typename Dispatch>
4296 uint32_t viewportCount,
4301 d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
4304 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4305 template <
typename Dispatch>
4312 d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(),
reinterpret_cast<const VkViewport *
>( viewports.data() ) );
4316 template <
typename Dispatch>
4318 uint32_t scissorCount,
4323 d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
4326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4327 template <
typename Dispatch>
4334 d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(),
reinterpret_cast<const VkRect2D *
>( scissors.data() ) );
4338 template <
typename Dispatch>
4342 d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
4345 template <
typename Dispatch>
4350 d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
4353 template <
typename Dispatch>
4357 d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
4360 template <
typename Dispatch>
4364 d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
4367 template <
typename Dispatch>
4372 d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
4375 template <
typename Dispatch>
4380 d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
4383 template <
typename Dispatch>
4388 d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ),
reference );
4391 template <
typename Dispatch>
4395 uint32_t descriptorSetCount,
4397 uint32_t dynamicOffsetCount,
4398 const uint32_t * pDynamicOffsets,
4402 d.vkCmdBindDescriptorSets( m_commandBuffer,
4403 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
4404 static_cast<VkPipelineLayout>( layout ),
4407 reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
4412 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4413 template <
typename Dispatch>
4423 d.vkCmdBindDescriptorSets( m_commandBuffer,
4424 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
4425 static_cast<VkPipelineLayout>( layout ),
4427 descriptorSets.size(),
4428 reinterpret_cast<const VkDescriptorSet *
>( descriptorSets.data() ),
4429 dynamicOffsets.size(),
4430 dynamicOffsets.data() );
4434 template <
typename Dispatch>
4441 d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>(
buffer ), static_cast<VkDeviceSize>(
offset ), static_cast<VkIndexType>( indexType ) );
4444 template <
typename Dispatch>
4446 uint32_t bindingCount,
4452 d.vkCmdBindVertexBuffers(
4453 m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
4456 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4457 template <
typename Dispatch>
4464 # ifdef VULKAN_HPP_NO_EXCEPTIONS
4473 d.vkCmdBindVertexBuffers( m_commandBuffer,
4476 reinterpret_cast<const VkBuffer *
>(
buffers.data() ),
4477 reinterpret_cast<const VkDeviceSize *>(
offsets.data() ) );
4481 template <
typename Dispatch>
4483 uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
4486 d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
4489 template <
typename Dispatch>
4491 uint32_t instanceCount,
4492 uint32_t firstIndex,
4493 int32_t vertexOffset,
4494 uint32_t firstInstance,
4498 d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
4501 template <
typename Dispatch>
4509 d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>(
buffer ), static_cast<VkDeviceSize>(
offset ), drawCount,
stride );
4512 template <
typename Dispatch>
4520 d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>(
buffer ), static_cast<VkDeviceSize>(
offset ), drawCount,
stride );
4523 template <
typename Dispatch>
4528 d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
4531 template <
typename Dispatch>
4537 d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>(
buffer ), static_cast<VkDeviceSize>(
offset ) );
4540 template <
typename Dispatch>
4543 uint32_t regionCount,
4548 d.vkCmdCopyBuffer( m_commandBuffer,
4549 static_cast<VkBuffer>( srcBuffer ),
4550 static_cast<VkBuffer>( dstBuffer ),
4552 reinterpret_cast<const VkBufferCopy *>( pRegions ) );
4555 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4556 template <
typename Dispatch>
4564 d.vkCmdCopyBuffer( m_commandBuffer,
4565 static_cast<VkBuffer>( srcBuffer ),
4566 static_cast<VkBuffer>( dstBuffer ),
4568 reinterpret_cast<const VkBufferCopy *
>( regions.data() ) );
4572 template <
typename Dispatch>
4577 uint32_t regionCount,
4582 d.vkCmdCopyImage( m_commandBuffer,
4583 static_cast<VkImage>( srcImage ),
4584 static_cast<VkImageLayout>( srcImageLayout ),
4585 static_cast<VkImage>( dstImage ),
4586 static_cast<VkImageLayout>( dstImageLayout ),
4588 reinterpret_cast<const VkImageCopy *>( pRegions ) );
4591 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4592 template <
typename Dispatch>
4602 d.vkCmdCopyImage( m_commandBuffer,
4603 static_cast<VkImage>( srcImage ),
4604 static_cast<VkImageLayout>( srcImageLayout ),
4605 static_cast<VkImage>( dstImage ),
4606 static_cast<VkImageLayout>( dstImageLayout ),
4608 reinterpret_cast<const VkImageCopy *
>( regions.data() ) );
4612 template <
typename Dispatch>
4617 uint32_t regionCount,
4623 d.vkCmdBlitImage( m_commandBuffer,
4624 static_cast<VkImage>( srcImage ),
4625 static_cast<VkImageLayout>( srcImageLayout ),
4626 static_cast<VkImage>( dstImage ),
4627 static_cast<VkImageLayout>( dstImageLayout ),
4629 reinterpret_cast<const VkImageBlit *>( pRegions ),
4630 static_cast<VkFilter>(
filter ) );
4633 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4634 template <
typename Dispatch>
4645 d.vkCmdBlitImage( m_commandBuffer,
4646 static_cast<VkImage>( srcImage ),
4647 static_cast<VkImageLayout>( srcImageLayout ),
4648 static_cast<VkImage>( dstImage ),
4649 static_cast<VkImageLayout>( dstImageLayout ),
4651 reinterpret_cast<const VkImageBlit *
>( regions.data() ),
4652 static_cast<VkFilter>(
filter ) );
4656 template <
typename Dispatch>
4660 uint32_t regionCount,
4665 d.vkCmdCopyBufferToImage( m_commandBuffer,
4666 static_cast<VkBuffer>( srcBuffer ),
4667 static_cast<VkImage>( dstImage ),
4668 static_cast<VkImageLayout>( dstImageLayout ),
4670 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
4673 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4674 template <
typename Dispatch>
4683 d.vkCmdCopyBufferToImage( m_commandBuffer,
4684 static_cast<VkBuffer>( srcBuffer ),
4685 static_cast<VkImage>( dstImage ),
4686 static_cast<VkImageLayout>( dstImageLayout ),
4692 template <
typename Dispatch>
4696 uint32_t regionCount,
4701 d.vkCmdCopyImageToBuffer( m_commandBuffer,
4702 static_cast<VkImage>( srcImage ),
4703 static_cast<VkImageLayout>( srcImageLayout ),
4704 static_cast<VkBuffer>( dstBuffer ),
4706 reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
4709 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4710 template <
typename Dispatch>
4719 d.vkCmdCopyImageToBuffer( m_commandBuffer,
4720 static_cast<VkImage>( srcImage ),
4721 static_cast<VkImageLayout>( srcImageLayout ),
4722 static_cast<VkBuffer>( dstBuffer ),
4728 template <
typename Dispatch>
4736 d.vkCmdUpdateBuffer(
4737 m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
4740 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4741 template <
typename DataType,
typename Dispatch>
4749 d.vkCmdUpdateBuffer( m_commandBuffer,
4750 static_cast<VkBuffer>( dstBuffer ),
4751 static_cast<VkDeviceSize>( dstOffset ),
4753 reinterpret_cast<const void *>(
data.data() ) );
4757 template <
typename Dispatch>
4765 d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>(
size ),
data );
4768 template <
typename Dispatch>
4772 uint32_t rangeCount,
4777 d.vkCmdClearColorImage( m_commandBuffer,
4778 static_cast<VkImage>(
image ),
4779 static_cast<VkImageLayout>( imageLayout ),
4780 reinterpret_cast<const VkClearColorValue *>( pColor ),
4782 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
4785 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4786 template <
typename Dispatch>
4795 d.vkCmdClearColorImage( m_commandBuffer,
4796 static_cast<VkImage>(
image ),
4797 static_cast<VkImageLayout>( imageLayout ),
4798 reinterpret_cast<const VkClearColorValue *>( &
color ),
4804 template <
typename Dispatch>
4808 uint32_t rangeCount,
4813 d.vkCmdClearDepthStencilImage( m_commandBuffer,
4814 static_cast<VkImage>(
image ),
4815 static_cast<VkImageLayout>( imageLayout ),
4816 reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
4818 reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
4821 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4822 template <
typename Dispatch>
4832 d.vkCmdClearDepthStencilImage( m_commandBuffer,
4833 static_cast<VkImage>(
image ),
4834 static_cast<VkImageLayout>( imageLayout ),
4835 reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
4841 template <
typename Dispatch>
4849 d.vkCmdClearAttachments( m_commandBuffer,
4851 reinterpret_cast<const VkClearAttachment *>( pAttachments ),
4853 reinterpret_cast<const VkClearRect *>( pRects ) );
4856 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4857 template <
typename Dispatch>
4864 d.vkCmdClearAttachments( m_commandBuffer,
4868 reinterpret_cast<const VkClearRect *
>( rects.data() ) );
4872 template <
typename Dispatch>
4877 uint32_t regionCount,
4882 d.vkCmdResolveImage( m_commandBuffer,
4883 static_cast<VkImage>( srcImage ),
4884 static_cast<VkImageLayout>( srcImageLayout ),
4885 static_cast<VkImage>( dstImage ),
4886 static_cast<VkImageLayout>( dstImageLayout ),
4888 reinterpret_cast<const VkImageResolve *>( pRegions ) );
4891 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4892 template <
typename Dispatch>
4902 d.vkCmdResolveImage( m_commandBuffer,
4903 static_cast<VkImage>( srcImage ),
4904 static_cast<VkImageLayout>( srcImageLayout ),
4905 static_cast<VkImage>( dstImage ),
4906 static_cast<VkImageLayout>( dstImageLayout ),
4912 template <
typename Dispatch>
4918 d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>(
event ), static_cast<VkPipelineStageFlags>( stageMask ) );
4921 template <
typename Dispatch>
4927 d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>(
event ), static_cast<VkPipelineStageFlags>( stageMask ) );
4930 template <
typename Dispatch>
4935 uint32_t memoryBarrierCount,
4937 uint32_t bufferMemoryBarrierCount,
4939 uint32_t imageMemoryBarrierCount,
4944 d.vkCmdWaitEvents( m_commandBuffer,
4946 reinterpret_cast<const VkEvent *>( pEvents ),
4947 static_cast<VkPipelineStageFlags>( srcStageMask ),
4948 static_cast<VkPipelineStageFlags>( dstStageMask ),
4950 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
4951 bufferMemoryBarrierCount,
4952 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
4953 imageMemoryBarrierCount,
4954 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
4957 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
4958 template <
typename Dispatch>
4970 d.vkCmdWaitEvents( m_commandBuffer,
4972 reinterpret_cast<const VkEvent *
>( events.data() ),
4973 static_cast<VkPipelineStageFlags>( srcStageMask ),
4975 memoryBarriers.size(),
4977 bufferMemoryBarriers.size(),
4979 imageMemoryBarriers.
size(),
4984 template <
typename Dispatch>
4988 uint32_t memoryBarrierCount,
4990 uint32_t bufferMemoryBarrierCount,
4992 uint32_t imageMemoryBarrierCount,
4997 d.vkCmdPipelineBarrier( m_commandBuffer,
4998 static_cast<VkPipelineStageFlags>( srcStageMask ),
4999 static_cast<VkPipelineStageFlags>( dstStageMask ),
5000 static_cast<VkDependencyFlags>( dependencyFlags ),
5002 reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
5003 bufferMemoryBarrierCount,
5004 reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
5005 imageMemoryBarrierCount,
5006 reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
5009 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5010 template <
typename Dispatch>
5022 d.vkCmdPipelineBarrier( m_commandBuffer,
5023 static_cast<VkPipelineStageFlags>( srcStageMask ),
5024 static_cast<VkPipelineStageFlags>( dstStageMask ),
5025 static_cast<VkDependencyFlags>( dependencyFlags ),
5026 memoryBarriers.size(),
5028 bufferMemoryBarriers.size(),
5030 imageMemoryBarriers.
size(),
5035 template <
typename Dispatch>
5042 d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ),
query, static_cast<VkQueryControlFlags>(
flags ) );
5045 template <
typename Dispatch>
5049 d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ),
query );
5052 template <
typename Dispatch>
5054 uint32_t firstQuery,
5055 uint32_t queryCount,
5059 d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
5062 template <
typename Dispatch>
5069 d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ),
query );
5072 template <
typename Dispatch>
5074 uint32_t firstQuery,
5075 uint32_t queryCount,
5083 d.vkCmdCopyQueryPoolResults( m_commandBuffer,
5084 static_cast<VkQueryPool>( queryPool ),
5087 static_cast<VkBuffer>( dstBuffer ),
5088 static_cast<VkDeviceSize>( dstOffset ),
5089 static_cast<VkDeviceSize>(
stride ),
5090 static_cast<VkQueryResultFlags>(
flags ) );
5093 template <
typename Dispatch>
5098 const void * pValues,
5102 d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ),
offset,
size, pValues );
5105 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5106 template <
typename ValuesType,
typename Dispatch>
5115 d.vkCmdPushConstants( m_commandBuffer,
5116 static_cast<VkPipelineLayout>( layout ),
5117 static_cast<VkShaderStageFlags>( stageFlags ),
5119 values.size() *
sizeof( ValuesType ),
5120 reinterpret_cast<const void *>(
values.data() ) );
5124 template <
typename Dispatch>
5130 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
5133 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5134 template <
typename Dispatch>
5141 d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
5145 template <
typename Dispatch>
5149 d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
5152 template <
typename Dispatch>
5156 d.vkCmdEndRenderPass( m_commandBuffer );
5159 template <
typename Dispatch>
5165 d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
5168 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5169 template <
typename Dispatch>
5175 d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(),
reinterpret_cast<const VkCommandBuffer *
>( commandBuffers.data() ) );
5181 template <
typename Dispatch>
5185 return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
5188 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5189 template <
typename Dispatch>
5194 uint32_t apiVersion;
5202 template <
typename Dispatch>
5208 return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
5211 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5212 template <
typename Dispatch>
5225 template <
typename Dispatch>
5231 return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
5234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5235 template <
typename Dispatch>
5248 template <
typename Dispatch>
5250 uint32_t localDeviceIndex,
5251 uint32_t remoteDeviceIndex,
5256 d.vkGetDeviceGroupPeerMemoryFeatures(
5257 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
5260 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5261 template <
typename Dispatch>
5263 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
5268 d.vkGetDeviceGroupPeerMemoryFeatures(
5269 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
5271 return peerMemoryFeatures;
5275 template <
typename Dispatch>
5279 d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
5282 template <
typename Dispatch>
5284 uint32_t baseGroupY,
5285 uint32_t baseGroupZ,
5286 uint32_t groupCountX,
5287 uint32_t groupCountY,
5288 uint32_t groupCountZ,
5292 d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
5295 template <
typename Dispatch>
5302 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
5303 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
5306 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5307 template <
typename PhysicalDeviceGroupPropertiesAllocator,
typename Dispatch>
5314 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
5315 uint32_t physicalDeviceGroupCount;
5319 result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount,
nullptr );
5320 if ( ( result ==
VK_SUCCESS ) && physicalDeviceGroupCount )
5322 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5323 result = d.vkEnumeratePhysicalDeviceGroups(
5324 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
5328 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
5329 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
5331 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5333 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
5336 template <
typename PhysicalDeviceGroupPropertiesAllocator,
5346 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
5347 physicalDeviceGroupPropertiesAllocator );
5348 uint32_t physicalDeviceGroupCount;
5352 result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount,
nullptr );
5353 if ( ( result ==
VK_SUCCESS ) && physicalDeviceGroupCount )
5355 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5356 result = d.vkEnumeratePhysicalDeviceGroups(
5357 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
5361 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
5362 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
5364 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
5366 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
5370 template <
typename Dispatch>
5376 d.vkGetImageMemoryRequirements2(
5377 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
5380 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5381 template <
typename Dispatch>
5388 d.vkGetImageMemoryRequirements2(
5389 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5391 return memoryRequirements;
5394 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5402 d.vkGetImageMemoryRequirements2(
5403 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5405 return structureChain;
5409 template <
typename Dispatch>
5415 d.vkGetBufferMemoryRequirements2(
5416 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
5419 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5420 template <
typename Dispatch>
5427 d.vkGetBufferMemoryRequirements2(
5428 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5430 return memoryRequirements;
5433 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5441 d.vkGetBufferMemoryRequirements2(
5442 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
5444 return structureChain;
5448 template <
typename Dispatch>
5450 uint32_t * pSparseMemoryRequirementCount,
5455 d.vkGetImageSparseMemoryRequirements2( m_device,
5456 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
5457 pSparseMemoryRequirementCount,
5458 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
5461 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5462 template <
typename SparseImageMemoryRequirements2Allocator,
typename Dispatch>
5468 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
5469 uint32_t sparseMemoryRequirementCount;
5470 d.vkGetImageSparseMemoryRequirements2(
5471 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount,
nullptr );
5472 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5473 d.vkGetImageSparseMemoryRequirements2( m_device,
5474 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
5475 &sparseMemoryRequirementCount,
5476 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
5478 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
5479 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
5481 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5483 return sparseMemoryRequirements;
5486 template <
typename SparseImageMemoryRequirements2Allocator,
5492 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
5493 Dispatch
const & d )
const
5497 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
5498 sparseImageMemoryRequirements2Allocator );
5499 uint32_t sparseMemoryRequirementCount;
5500 d.vkGetImageSparseMemoryRequirements2(
5501 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount,
nullptr );
5502 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5503 d.vkGetImageSparseMemoryRequirements2( m_device,
5504 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
5505 &sparseMemoryRequirementCount,
5506 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
5508 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
5509 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
5511 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
5513 return sparseMemoryRequirements;
5517 template <
typename Dispatch>
5521 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
5524 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5525 template <
typename Dispatch>
5532 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
5537 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5544 d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
5546 return structureChain;
5550 template <
typename Dispatch>
5555 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
5558 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5559 template <
typename Dispatch>
5566 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
5571 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5578 d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
5580 return structureChain;
5584 template <
typename Dispatch>
5590 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
5593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5594 template <
typename Dispatch>
5601 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
5603 return formatProperties;
5606 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5614 d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
5616 return structureChain;
5620 template <
typename Dispatch>
5627 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
5628 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
5629 reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
5632 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5633 template <
typename Dispatch>
5640 VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
5641 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
5642 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
5645 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
5648 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5656 VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
5657 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
5658 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
5661 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
5665 template <
typename Dispatch>
5671 d.vkGetPhysicalDeviceQueueFamilyProperties2(
5672 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
5675 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5676 template <
typename QueueFamilyProperties2Allocator,
typename Dispatch>
5682 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
5683 uint32_t queueFamilyPropertyCount;
5684 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
5685 queueFamilyProperties.resize( queueFamilyPropertyCount );
5686 d.vkGetPhysicalDeviceQueueFamilyProperties2(
5687 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5690 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5692 queueFamilyProperties.resize( queueFamilyPropertyCount );
5694 return queueFamilyProperties;
5697 template <
typename QueueFamilyProperties2Allocator,
5706 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
5707 uint32_t queueFamilyPropertyCount;
5708 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
5709 queueFamilyProperties.resize( queueFamilyPropertyCount );
5710 d.vkGetPhysicalDeviceQueueFamilyProperties2(
5711 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5714 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5716 queueFamilyProperties.resize( queueFamilyPropertyCount );
5718 return queueFamilyProperties;
5721 template <
typename StructureChain,
typename StructureChainAllocator,
typename Dispatch>
5727 std::vector<StructureChain, StructureChainAllocator> structureChains;
5728 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
5729 uint32_t queueFamilyPropertyCount;
5730 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
5731 structureChains.resize( queueFamilyPropertyCount );
5732 queueFamilyProperties.resize( queueFamilyPropertyCount );
5733 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5735 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
5737 d.vkGetPhysicalDeviceQueueFamilyProperties2(
5738 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5741 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5743 structureChains.resize( queueFamilyPropertyCount );
5745 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5747 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
5749 return structureChains;
5753 typename StructureChainAllocator,
5762 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
5763 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
5764 uint32_t queueFamilyPropertyCount;
5765 d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
5766 structureChains.resize( queueFamilyPropertyCount );
5767 queueFamilyProperties.resize( queueFamilyPropertyCount );
5768 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5770 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
5772 d.vkGetPhysicalDeviceQueueFamilyProperties2(
5773 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
5776 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
5778 structureChains.resize( queueFamilyPropertyCount );
5780 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
5782 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
5784 return structureChains;
5788 template <
typename Dispatch>
5793 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
5796 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5797 template <
typename Dispatch>
5804 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
5806 return memoryProperties;
5809 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
5816 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
5817 d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
5819 return structureChain;
5823 template <
typename Dispatch>
5825 uint32_t * pPropertyCount,
5830 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
5831 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
5833 reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
5836 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5837 template <
typename SparseImageFormatProperties2Allocator,
typename Dispatch>
5843 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
5844 uint32_t propertyCount;
5845 d.vkGetPhysicalDeviceSparseImageFormatProperties2(
5846 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount,
nullptr );
5847 properties.resize( propertyCount );
5848 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
5849 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
5851 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
5854 if ( propertyCount < properties.size() )
5856 properties.resize( propertyCount );
5861 template <
typename SparseImageFormatProperties2Allocator,
5867 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
5868 Dispatch
const & d )
const
5872 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
5873 uint32_t propertyCount;
5874 d.vkGetPhysicalDeviceSparseImageFormatProperties2(
5875 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount,
nullptr );
5876 properties.resize( propertyCount );
5877 d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
5878 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
5880 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
5883 if ( propertyCount < properties.size() )
5885 properties.resize( propertyCount );
5891 template <
typename Dispatch>
5897 d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>(
flags ) );
5900 template <
typename Dispatch>
5906 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
5909 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5910 template <
typename Dispatch>
5917 d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
5923 template <
typename Dispatch>
5931 return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
5932 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
5933 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
5934 reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
5937 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5938 template <
typename Dispatch>
5942 Dispatch
const & d )
const
5949 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
5950 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
5951 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
5954 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
5957 # ifndef VULKAN_HPP_NO_SMART_HANDLE
5958 template <
typename Dispatch>
5962 Dispatch
const & d )
const
5969 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
5970 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
5971 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
5975 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
5981 template <
typename Dispatch>
5987 d.vkDestroySamplerYcbcrConversion(
5988 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
5991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
5992 template <
typename Dispatch>
5999 d.vkDestroySamplerYcbcrConversion(
6001 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
6002 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6006 template <
typename Dispatch>
6012 d.vkDestroySamplerYcbcrConversion(
6013 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6016 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6017 template <
typename Dispatch>
6024 d.vkDestroySamplerYcbcrConversion(
6026 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
6027 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6031 template <
typename Dispatch>
6039 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
6040 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
6041 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6042 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
6045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6046 template <
typename Dispatch>
6050 Dispatch
const & d )
const
6057 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
6058 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6059 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
6062 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
6065 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6066 template <
typename Dispatch>
6070 Dispatch
const & d )
const
6077 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
6078 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6079 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
6089 template <
typename Dispatch>
6095 d.vkDestroyDescriptorUpdateTemplate(
6096 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6099 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6100 template <
typename Dispatch>
6107 d.vkDestroyDescriptorUpdateTemplate(
6109 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6110 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6114 template <
typename Dispatch>
6120 d.vkDestroyDescriptorUpdateTemplate(
6121 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6124 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6125 template <
typename Dispatch>
6132 d.vkDestroyDescriptorUpdateTemplate(
6134 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6135 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6139 template <
typename Dispatch>
6146 d.vkUpdateDescriptorSetWithTemplate(
6147 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
6150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6151 template <
typename DataType,
typename Dispatch>
6159 d.vkUpdateDescriptorSetWithTemplate( m_device,
6160 static_cast<VkDescriptorSet>( descriptorSet ),
6161 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
6162 reinterpret_cast<const void *>( &
data ) );
6166 template <
typename Dispatch>
6172 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
6173 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
6174 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
6177 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6178 template <
typename Dispatch>
6186 d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
6187 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
6188 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
6190 return externalBufferProperties;
6194 template <
typename Dispatch>
6200 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
6201 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
6202 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
6205 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6206 template <
typename Dispatch>
6214 d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
6215 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
6216 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
6218 return externalFenceProperties;
6222 template <
typename Dispatch>
6229 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
6230 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
6231 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
6234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6235 template <
typename Dispatch>
6243 d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
6244 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
6245 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
6247 return externalSemaphoreProperties;
6251 template <
typename Dispatch>
6257 d.vkGetDescriptorSetLayoutSupport(
6258 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
6261 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6262 template <
typename Dispatch>
6270 d.vkGetDescriptorSetLayoutSupport(
6271 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
6276 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
6285 d.vkGetDescriptorSetLayoutSupport(
6286 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
6288 return structureChain;
6294 template <
typename Dispatch>
6299 uint32_t maxDrawCount,
6304 d.vkCmdDrawIndirectCount( m_commandBuffer,
6305 static_cast<VkBuffer>(
buffer ),
6306 static_cast<VkDeviceSize>(
offset ),
6307 static_cast<VkBuffer>( countBuffer ),
6308 static_cast<VkDeviceSize>( countBufferOffset ),
6313 template <
typename Dispatch>
6318 uint32_t maxDrawCount,
6323 d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
6324 static_cast<VkBuffer>(
buffer ),
6325 static_cast<VkDeviceSize>(
offset ),
6326 static_cast<VkBuffer>( countBuffer ),
6327 static_cast<VkDeviceSize>( countBufferOffset ),
6332 template <
typename Dispatch>
6339 return static_cast<Result>( d.vkCreateRenderPass2( m_device,
6340 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
6341 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6342 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
6345 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6346 template <
typename Dispatch>
6350 Dispatch
const & d )
const
6356 d.vkCreateRenderPass2( m_device,
6357 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
6358 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6359 reinterpret_cast<VkRenderPass *>( &renderPass ) );
6365 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6366 template <
typename Dispatch>
6370 Dispatch
const & d )
const
6376 d.vkCreateRenderPass2( m_device,
6377 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
6378 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6379 reinterpret_cast<VkRenderPass *>( &renderPass ) );
6383 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6389 template <
typename Dispatch>
6395 d.vkCmdBeginRenderPass2(
6396 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
6399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6400 template <
typename Dispatch>
6407 d.vkCmdBeginRenderPass2(
6408 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
6412 template <
typename Dispatch>
6418 d.vkCmdNextSubpass2(
6419 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
6422 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6423 template <
typename Dispatch>
6430 d.vkCmdNextSubpass2(
6431 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
6435 template <
typename Dispatch>
6440 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
6443 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6444 template <
typename Dispatch>
6450 d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
6454 template <
typename Dispatch>
6459 d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
6462 template <
typename Dispatch>
6468 return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
6471 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6472 template <
typename Dispatch>
6474 Dispatch
const & d )
const
6479 VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
6486 template <
typename Dispatch>
6492 return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ),
timeout ) );
6495 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6496 template <
typename Dispatch>
6502 VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
6503 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6511 template <
typename Dispatch>
6516 return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
6519 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6520 template <
typename Dispatch>
6526 VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
6533 template <
typename Dispatch>
6538 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
6541 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6542 template <
typename Dispatch>
6548 VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
6554 template <
typename Dispatch>
6559 return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
6562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6563 template <
typename Dispatch>
6569 uint64_t
result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
6575 template <
typename Dispatch>
6580 return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
6583 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6584 template <
typename Dispatch>
6590 uint64_t
result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
6598 template <
typename Dispatch>
6604 return static_cast<Result>(
6605 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
6608 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6609 template <
typename PhysicalDeviceToolPropertiesAllocator,
typename Dispatch>
6616 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
6621 result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount,
nullptr );
6624 toolProperties.resize( toolCount );
6626 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
6631 if ( toolCount < toolProperties.size() )
6633 toolProperties.resize( toolCount );
6635 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
6638 template <
typename PhysicalDeviceToolPropertiesAllocator,
6648 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
6649 physicalDeviceToolPropertiesAllocator );
6654 result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount,
nullptr );
6657 toolProperties.resize( toolCount );
6659 d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
6664 if ( toolCount < toolProperties.size() )
6666 toolProperties.resize( toolCount );
6668 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
6672 template <
typename Dispatch>
6679 return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
6680 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
6681 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
6682 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
6685 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6686 template <
typename Dispatch>
6690 Dispatch
const & d )
const
6696 d.vkCreatePrivateDataSlot( m_device,
6697 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
6698 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6699 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
6702 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
6705 # ifndef VULKAN_HPP_NO_SMART_HANDLE
6706 template <
typename Dispatch>
6710 Dispatch
const & d )
const
6716 d.vkCreatePrivateDataSlot( m_device,
6717 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
6718 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
6719 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
6723 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
6729 template <
typename Dispatch>
6735 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6738 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6739 template <
typename Dispatch>
6746 d.vkDestroyPrivateDataSlot(
6748 static_cast<VkPrivateDataSlot>( privateDataSlot ),
6749 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6753 template <
typename Dispatch>
6759 d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
6762 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6763 template <
typename Dispatch>
6770 d.vkDestroyPrivateDataSlot(
6772 static_cast<VkPrivateDataSlot>( privateDataSlot ),
6773 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
6777 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
6778 template <
typename Dispatch>
6780 uint64_t objectHandle,
6786 return static_cast<Result>(
6787 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ),
data ) );
6790 template <
typename Dispatch>
6792 uint64_t objectHandle,
6795 Dispatch
const & d )
const
6800 d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
6807 template <
typename Dispatch>
6809 uint64_t objectHandle,
6815 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
6818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6819 template <
typename Dispatch>
6821 uint64_t objectHandle,
6828 d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
6834 template <
typename Dispatch>
6840 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>(
event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
6843 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6844 template <
typename Dispatch>
6851 d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>(
event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
6855 template <
typename Dispatch>
6861 d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>(
event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
6864 template <
typename Dispatch>
6872 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
6875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6876 template <
typename Dispatch>
6882 # ifdef VULKAN_HPP_NO_EXCEPTIONS
6885 if ( events.size() != dependencyInfos.size() )
6891 d.vkCmdWaitEvents2( m_commandBuffer,
6893 reinterpret_cast<const VkEvent *
>( events.data() ),
6894 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
6898 template <
typename Dispatch>
6903 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
6906 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6907 template <
typename Dispatch>
6913 d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
6917 template <
typename Dispatch>
6924 d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ),
query );
6927 template <
typename Dispatch>
6934 return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
6937 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6938 template <
typename Dispatch>
6951 template <
typename Dispatch>
6956 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
6959 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6960 template <
typename Dispatch>
6966 d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) );
6970 template <
typename Dispatch>
6974 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
6977 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6978 template <
typename Dispatch>
6983 d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) );
6987 template <
typename Dispatch>
6992 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
6995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
6996 template <
typename Dispatch>
7002 d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) );
7006 template <
typename Dispatch>
7011 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
7014 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7015 template <
typename Dispatch>
7021 d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) );
7025 template <
typename Dispatch>
7029 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
7032 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7033 template <
typename Dispatch>
7038 d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
7042 template <
typename Dispatch>
7047 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
7050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7051 template <
typename Dispatch>
7057 d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
7061 template <
typename Dispatch>
7066 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
7069 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7070 template <
typename Dispatch>
7076 d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
7080 template <
typename Dispatch>
7084 d.vkCmdEndRendering( m_commandBuffer );
7087 template <
typename Dispatch>
7091 d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
7094 template <
typename Dispatch>
7098 d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
7101 template <
typename Dispatch>
7106 d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
7109 template <
typename Dispatch>
7115 d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
7118 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7119 template <
typename Dispatch>
7125 d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(),
reinterpret_cast<const VkViewport *
>( viewports.data() ) );
7129 template <
typename Dispatch>
7134 d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
7137 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7138 template <
typename Dispatch>
7144 d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(),
reinterpret_cast<const VkRect2D *
>( scissors.data() ) );
7148 template <
typename Dispatch>
7150 uint32_t bindingCount,
7158 d.vkCmdBindVertexBuffers2( m_commandBuffer,
7161 reinterpret_cast<const VkBuffer *>( pBuffers ),
7162 reinterpret_cast<const VkDeviceSize *>( pOffsets ),
7163 reinterpret_cast<const VkDeviceSize *>( pSizes ),
7164 reinterpret_cast<const VkDeviceSize *>( pStrides ) );
7167 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7168 template <
typename Dispatch>
7177 # ifdef VULKAN_HPP_NO_EXCEPTIONS
7196 d.vkCmdBindVertexBuffers2( m_commandBuffer,
7199 reinterpret_cast<const VkBuffer *
>(
buffers.data() ),
7200 reinterpret_cast<const VkDeviceSize *>(
offsets.data() ),
7201 reinterpret_cast<const VkDeviceSize *>(
sizes.data() ),
7202 reinterpret_cast<const VkDeviceSize *>(
strides.data() ) );
7206 template <
typename Dispatch>
7210 d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
7213 template <
typename Dispatch>
7217 d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
7220 template <
typename Dispatch>
7224 d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
7227 template <
typename Dispatch>
7232 d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
7235 template <
typename Dispatch>
7239 d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
7242 template <
typename Dispatch>
7251 d.vkCmdSetStencilOp( m_commandBuffer,
7252 static_cast<VkStencilFaceFlags>( faceMask ),
7253 static_cast<VkStencilOp>( failOp ),
7254 static_cast<VkStencilOp>( passOp ),
7255 static_cast<VkStencilOp>( depthFailOp ),
7256 static_cast<VkCompareOp>( compareOp ) );
7259 template <
typename Dispatch>
7264 d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
7267 template <
typename Dispatch>
7271 d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
7274 template <
typename Dispatch>
7279 d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
7282 template <
typename Dispatch>
7288 d.vkGetDeviceBufferMemoryRequirements(
7289 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
7292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7293 template <
typename Dispatch>
7300 d.vkGetDeviceBufferMemoryRequirements(
7301 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7303 return memoryRequirements;
7306 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
7314 d.vkGetDeviceBufferMemoryRequirements(
7315 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7317 return structureChain;
7321 template <
typename Dispatch>
7327 d.vkGetDeviceImageMemoryRequirements(
7328 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
7331 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7332 template <
typename Dispatch>
7339 d.vkGetDeviceImageMemoryRequirements(
7340 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7342 return memoryRequirements;
7345 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
7353 d.vkGetDeviceImageMemoryRequirements(
7354 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
7356 return structureChain;
7360 template <
typename Dispatch>
7362 uint32_t * pSparseMemoryRequirementCount,
7367 d.vkGetDeviceImageSparseMemoryRequirements( m_device,
7368 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
7369 pSparseMemoryRequirementCount,
7370 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
7373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7374 template <
typename SparseImageMemoryRequirements2Allocator,
typename Dispatch>
7380 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
7381 uint32_t sparseMemoryRequirementCount;
7382 d.vkGetDeviceImageSparseMemoryRequirements(
7383 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount,
nullptr );
7384 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7385 d.vkGetDeviceImageSparseMemoryRequirements( m_device,
7386 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
7387 &sparseMemoryRequirementCount,
7388 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
7390 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
7391 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
7393 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7395 return sparseMemoryRequirements;
7398 template <
typename SparseImageMemoryRequirements2Allocator,
7404 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
7405 Dispatch
const & d )
const
7409 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
7410 sparseImageMemoryRequirements2Allocator );
7411 uint32_t sparseMemoryRequirementCount;
7412 d.vkGetDeviceImageSparseMemoryRequirements(
7413 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount,
nullptr );
7414 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7415 d.vkGetDeviceImageSparseMemoryRequirements( m_device,
7416 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
7417 &sparseMemoryRequirementCount,
7418 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
7420 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
7421 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
7423 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
7425 return sparseMemoryRequirements;
7431 template <
typename Dispatch>
7437 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>(
surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7440 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7441 template <
typename Dispatch>
7448 d.vkDestroySurfaceKHR( m_instance,
7449 static_cast<VkSurfaceKHR>(
surface ),
7450 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7454 template <
typename Dispatch>
7460 d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>(
surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7463 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7464 template <
typename Dispatch>
7471 d.vkDestroySurfaceKHR( m_instance,
7472 static_cast<VkSurfaceKHR>(
surface ),
7473 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
7477 template <
typename Dispatch>
7484 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
7485 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>(
surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
7488 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7489 template <
typename Dispatch>
7497 m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
7504 template <
typename Dispatch>
7510 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
7511 m_physicalDevice, static_cast<VkSurfaceKHR>(
surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
7514 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7515 template <
typename Dispatch>
7523 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
7526 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
7530 template <
typename Dispatch>
7532 uint32_t * pSurfaceFormatCount,
7537 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
7538 m_physicalDevice, static_cast<VkSurfaceKHR>(
surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
7541 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7542 template <
typename SurfaceFormatKHRAllocator,
typename Dispatch>
7548 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
7549 uint32_t surfaceFormatCount;
7553 result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount,
nullptr );
7554 if ( ( result ==
VK_SUCCESS ) && surfaceFormatCount )
7556 surfaceFormats.resize( surfaceFormatCount );
7557 result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
7558 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
7563 if ( surfaceFormatCount < surfaceFormats.size() )
7565 surfaceFormats.resize( surfaceFormatCount );
7567 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
7570 template <
typename SurfaceFormatKHRAllocator,
7576 SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
7577 Dispatch
const & d )
const
7581 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
7582 uint32_t surfaceFormatCount;
7586 result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount,
nullptr );
7587 if ( ( result ==
VK_SUCCESS ) && surfaceFormatCount )
7589 surfaceFormats.resize( surfaceFormatCount );
7590 result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
7591 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
7596 if ( surfaceFormatCount < surfaceFormats.size() )
7598 surfaceFormats.resize( surfaceFormatCount );
7600 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
7604 template <
typename Dispatch>
7606 uint32_t * pPresentModeCount,
7611 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
7612 m_physicalDevice, static_cast<VkSurfaceKHR>(
surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
7615 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7616 template <
typename PresentModeKHRAllocator,
typename Dispatch>
7622 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
7623 uint32_t presentModeCount;
7627 result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount,
nullptr );
7628 if ( ( result ==
VK_SUCCESS ) && presentModeCount )
7630 presentModes.resize( presentModeCount );
7631 result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
7632 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
7637 if ( presentModeCount < presentModes.size() )
7639 presentModes.resize( presentModeCount );
// Overload of getSurfacePresentModesKHR taking a caller-supplied allocator
// instance for the returned vector; otherwise identical two-call enumeration.
7644 template <
typename PresentModeKHRAllocator,
7650 PresentModeKHRAllocator & presentModeKHRAllocator,
7651 Dispatch
const & d )
const
// The result vector is constructed with the user-provided allocator.
7655 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
7656 uint32_t presentModeCount;
// Count query (null data pointer), then fetch into the sized vector.
7660 result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount,
nullptr );
7661 if ( ( result ==
VK_SUCCESS ) && presentModeCount )
7663 presentModes.resize( presentModeCount );
7664 result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
7665 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
// Trim any over-allocation before returning.
7670 if ( presentModeCount < presentModes.size() )
7672 presentModes.resize( presentModeCount );
// Raw wrapper around vkCreateSwapchainKHR: passes the create-info, optional
// allocation callbacks and output handle pointer straight to the C API.
7680 template <
typename Dispatch>
7687 return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
7688 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
7689 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
7690 reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
7693 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes references, returns the created SwapchainKHR handle
// (wrapped in the library's result type); allocator is Optional<>.
7694 template <
typename Dispatch>
7698 Dispatch
const & d )
const
7704 d.vkCreateSwapchainKHR( m_device,
7705 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
7706 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7707 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
7713 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: same call, but the returned swapchain is owned by a
// UniqueHandle that destroys it automatically (smart-handle mode only).
7714 template <
typename Dispatch>
7718 Dispatch
const & d )
const
7724 d.vkCreateSwapchainKHR( m_device,
7725 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
7726 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
7727 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
7731 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Raw wrapper around vkDestroySwapchainKHR (pointer-style allocator).
7737 template <
typename Dispatch>
7743 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7746 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: Optional<AllocationCallbacks> converted to the C pointer.
7747 template <
typename Dispatch>
7754 d.vkDestroySwapchainKHR( m_device,
7755 static_cast<VkSwapchainKHR>( swapchain ),
7756 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Second destroy overload pair — presumably Device::destroy( SwapchainKHR )
// dispatching to the same C entry point; signatures elided here, confirm
// against the generator output.
7760 template <
typename Dispatch>
7766 d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
7769 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
7770 template <
typename Dispatch>
7777 d.vkDestroySwapchainKHR( m_device,
7778 static_cast<VkSwapchainKHR>( swapchain ),
7779 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Raw wrapper around vkGetSwapchainImagesKHR: caller owns count/array storage.
7783 template <
typename Dispatch>
7785 uint32_t * pSwapchainImageCount,
7790 return static_cast<Result>(
7791 d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
7794 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns std::vector<Image> via two-call enumeration.
7795 template <
typename ImageAllocator,
typename Dispatch>
7801 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
7802 uint32_t swapchainImageCount;
// Count query first (null data pointer), then fetch into the sized vector.
7806 result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount,
nullptr );
7807 if ( ( result ==
VK_SUCCESS ) && swapchainImageCount )
7809 swapchainImages.resize( swapchainImageCount );
7810 result = d.vkGetSwapchainImagesKHR(
7811 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
// Trim over-allocation before returning.
7816 if ( swapchainImageCount < swapchainImages.size() )
7818 swapchainImages.resize( swapchainImageCount );
7820 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
// Allocator-taking overload: identical logic, vector constructed with the
// caller-supplied ImageAllocator instance.
7829 std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
7830 uint32_t swapchainImageCount;
7834 result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount,
nullptr );
7835 if ( ( result ==
VK_SUCCESS ) && swapchainImageCount )
7837 swapchainImages.resize( swapchainImageCount );
7838 result = d.vkGetSwapchainImagesKHR(
7839 m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
7844 if ( swapchainImageCount < swapchainImages.size() )
7846 swapchainImages.resize( swapchainImageCount );
7848 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
// Raw wrapper around vkAcquireNextImageKHR: writes the acquired image index
// through pImageIndex; semaphore/fence are signalled per the Vulkan spec.
7852 template <
typename Dispatch>
7857 uint32_t * pImageIndex,
7861 return static_cast<Result>( d.vkAcquireNextImageKHR(
7862 m_device, static_cast<VkSwapchainKHR>( swapchain ),
timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
7865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the image index by value after a resultCheck.
7866 template <
typename Dispatch>
7871 Dispatch
const & d )
const
7875 uint32_t imageIndex;
7877 m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
7878 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Raw wrapper around vkQueuePresentKHR on this queue.
7889 template <
typename Dispatch>
7894 return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
7897 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: reference-taking variant that validates the result via
// resultCheck (which may throw depending on library configuration).
7898 template <
typename Dispatch>
7900 Dispatch
const & d )
const
7904 VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
7905 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Raw wrapper around vkGetDeviceGroupPresentCapabilitiesKHR: fills the
// caller-provided capabilities struct.
7913 template <
typename Dispatch>
7918 return static_cast<Result>(
7919 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
7922 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the capabilities struct by value.
7923 template <
typename Dispatch>
7931 d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
7934 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
// Raw wrapper around vkGetDeviceGroupSurfacePresentModesKHR: writes the
// supported device-group present-mode flags for the given surface.
7938 template <
typename Dispatch>
7944 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
7945 m_device, static_cast<VkSurfaceKHR>(
surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
7948 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the flags by value instead of via out-pointer.
7949 template <
typename Dispatch>
7957 m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
// Raw wrapper around vkGetPhysicalDevicePresentRectanglesKHR: caller supplies
// count/array storage for the returned Rect2D rectangles.
7964 template <
typename Dispatch>
7966 uint32_t * pRectCount,
7971 return static_cast<Result>(
7972 d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>(
surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
7975 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: std::vector<Rect2D> via the two-call enumeration idiom.
7976 template <
typename Rect2DAllocator,
typename Dispatch>
7982 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
7987 result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount,
nullptr );
7990 rects.resize( rectCount );
7991 result = d.vkGetPhysicalDevicePresentRectanglesKHR(
7992 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
// Trim over-allocation before returning.
7997 if ( rectCount < rects.size() )
7999 rects.resize( rectCount );
// Allocator-taking overload: identical logic, vector built with the
// caller-supplied Rect2DAllocator.
8010 std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
8015 result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount,
nullptr );
8018 rects.resize( rectCount );
8019 result = d.vkGetPhysicalDevicePresentRectanglesKHR(
8020 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
8025 if ( rectCount < rects.size() )
8027 rects.resize( rectCount );
// Raw wrapper around vkAcquireNextImage2KHR (device-group aware acquire):
// takes an AcquireNextImageInfoKHR and writes the image index out-param.
8033 template <
typename Dispatch>
8035 uint32_t * pImageIndex,
8039 return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
8042 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the acquired index after a resultCheck.
8043 template <
typename Dispatch>
8045 Dispatch
const & d )
const
8049 uint32_t imageIndex;
8050 VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
8051 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Raw wrapper around vkGetPhysicalDeviceDisplayPropertiesKHR.
8064 template <
typename Dispatch>
8070 return static_cast<Result>(
8071 d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
8074 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns a vector of DisplayPropertiesKHR via the
// two-call enumeration idiom (count query, then data fetch).
8075 template <
typename DisplayPropertiesKHRAllocator,
typename Dispatch>
8081 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
8082 uint32_t propertyCount;
8086 result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount,
nullptr );
8087 if ( ( result ==
VK_SUCCESS ) && propertyCount )
8089 properties.resize( propertyCount );
8090 result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
// Trim over-allocation before returning.
8095 if ( propertyCount < properties.size() )
8097 properties.resize( propertyCount );
// Allocator-taking overload: identical logic, caller-supplied allocator.
8102 template <
typename DisplayPropertiesKHRAllocator,
8111 std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
8112 uint32_t propertyCount;
8116 result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount,
nullptr );
8117 if ( ( result ==
VK_SUCCESS ) && propertyCount )
8119 properties.resize( propertyCount );
8120 result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
8125 if ( propertyCount < properties.size() )
8127 properties.resize( propertyCount );
// Raw wrapper around vkGetPhysicalDeviceDisplayPlanePropertiesKHR.
8133 template <
typename Dispatch>
8139 return static_cast<Result>(
8140 d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
8143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: vector of DisplayPlanePropertiesKHR via two-call idiom.
8144 template <
typename DisplayPlanePropertiesKHRAllocator,
typename Dispatch>
8151 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
8152 uint32_t propertyCount;
8156 result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount,
nullptr );
8157 if ( ( result ==
VK_SUCCESS ) && propertyCount )
8159 properties.resize( propertyCount );
8160 result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
8161 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
// Trim over-allocation before returning.
8166 if ( propertyCount < properties.size() )
8168 properties.resize( propertyCount );
// Allocator-taking overload: identical logic, caller-supplied allocator.
8173 template <
typename DisplayPlanePropertiesKHRAllocator,
8183 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
8184 uint32_t propertyCount;
8188 result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount,
nullptr );
8189 if ( ( result ==
VK_SUCCESS ) && propertyCount )
8191 properties.resize( propertyCount );
8192 result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
8193 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
8198 if ( propertyCount < properties.size() )
8200 properties.resize( propertyCount );
// Raw wrapper around vkGetDisplayPlaneSupportedDisplaysKHR for a given plane.
8206 template <
typename Dispatch>
8208 uint32_t * pDisplayCount,
8213 return static_cast<Result>(
8214 d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
8217 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: vector of DisplayKHR handles via two-call enumeration.
8218 template <
typename DisplayKHRAllocator,
typename Dispatch>
8224 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
8225 uint32_t displayCount;
8229 result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount,
nullptr );
8230 if ( ( result ==
VK_SUCCESS ) && displayCount )
8232 displays.resize( displayCount );
8233 result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
// Trim over-allocation before returning.
8238 if ( displayCount < displays.size() )
8240 displays.resize( displayCount );
// Allocator-taking overload: identical logic, caller-supplied allocator.
8245 template <
typename DisplayKHRAllocator,
8254 std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
8255 uint32_t displayCount;
8259 result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount,
nullptr );
8260 if ( ( result ==
VK_SUCCESS ) && displayCount )
8262 displays.resize( displayCount );
8263 result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
8268 if ( displayCount < displays.size() )
8270 displays.resize( displayCount );
// Raw wrapper around vkGetDisplayModePropertiesKHR for the given display.
8276 template <
typename Dispatch>
8278 uint32_t * pPropertyCount,
8283 return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
8284 m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
8287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: vector of DisplayModePropertiesKHR via two-call idiom.
8288 template <
typename DisplayModePropertiesKHRAllocator,
typename Dispatch>
8295 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
8296 uint32_t propertyCount;
8300 result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount,
nullptr );
8301 if ( ( result ==
VK_SUCCESS ) && propertyCount )
8303 properties.resize( propertyCount );
8304 result = d.vkGetDisplayModePropertiesKHR(
8305 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
// Trim over-allocation before returning.
8310 if ( propertyCount < properties.size() )
8312 properties.resize( propertyCount );
// Allocator-taking overload: identical logic, caller-supplied allocator.
8317 template <
typename DisplayModePropertiesKHRAllocator,
8324 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
8325 Dispatch
const & d )
const
8329 std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
8330 uint32_t propertyCount;
8334 result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount,
nullptr );
8335 if ( ( result ==
VK_SUCCESS ) && propertyCount )
8337 properties.resize( propertyCount );
8338 result = d.vkGetDisplayModePropertiesKHR(
8339 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
8344 if ( propertyCount < properties.size() )
8346 properties.resize( propertyCount );
// Raw wrapper around vkCreateDisplayModeKHR: creates a display mode for the
// given display, writing the handle through pMode.
8352 template <
typename Dispatch>
8360 return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
8361 static_cast<VkDisplayKHR>( display ),
8362 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
8363 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8364 reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
8367 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: reference parameters, returns the DisplayModeKHR handle.
8368 template <
typename Dispatch>
8373 Dispatch
const & d )
const
8379 d.vkCreateDisplayModeKHR( m_physicalDevice,
8380 static_cast<VkDisplayKHR>( display ),
8381 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
8382 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8383 reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
8389 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant (smart-handle mode only): same call, mode handle
// returned under automatic ownership.
8390 template <
typename Dispatch>
8395 Dispatch
const & d )
const
8401 d.vkCreateDisplayModeKHR( m_physicalDevice,
8402 static_cast<VkDisplayKHR>( display ),
8403 reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
8404 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8405 reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
8409 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Raw wrapper around vkGetDisplayPlaneCapabilitiesKHR: queries plane
// capabilities for a (mode, planeIndex) pair into caller storage.
8415 template <
typename Dispatch>
8418 uint32_t planeIndex,
8423 return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
8424 m_physicalDevice, static_cast<VkDisplayModeKHR>(
mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
8427 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the capabilities struct by value.
8428 template <
typename Dispatch>
8436 m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
// Raw wrapper around vkCreateDisplayPlaneSurfaceKHR: creates a SurfaceKHR
// backed by a display plane.
8443 template <
typename Dispatch>
8450 return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
8451 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
8452 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8453 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8456 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the SurfaceKHR handle.
8457 template <
typename Dispatch>
8461 Dispatch
const & d )
const
8468 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
8469 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8470 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8476 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant (smart-handle mode only).
8477 template <
typename Dispatch>
8481 Dispatch
const & d )
const
8488 reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
8489 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8490 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8494 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Raw wrapper around vkCreateSharedSwapchainsKHR: creates several swapchains
// in one call, writing the handles into the caller-provided array.
8502 template <
typename Dispatch>
8510 return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
8512 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
8513 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8514 reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
8517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: one swapchain per create-info, returned as a vector
// sized from createInfos.size().
8518 template <
typename SwapchainKHRAllocator,
typename Dispatch>
8522 Dispatch
const & d )
const
8526 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.
size() );
8531 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8532 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
// Allocator-taking overload: result vector built with the caller-supplied
// SwapchainKHRAllocator instance.
8538 template <
typename SwapchainKHRAllocator,
8545 SwapchainKHRAllocator & swapchainKHRAllocator,
8546 Dispatch
const & d )
const
8550 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.
size(), swapchainKHRAllocator );
8555 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8556 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
// Single-create-info convenience overload — presumably createSharedSwapchainKHR
// returning one SwapchainKHR; signature elided here, confirm against generator.
8562 template <
typename Dispatch>
8566 Dispatch
const & d )
const
8574 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
8575 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8576 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
8582 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: creates the raw handles, then wraps each one in a
// UniqueHandle collected into uniqueSwapchains (smart-handle mode only).
8583 template <
typename Dispatch,
typename SwapchainKHRAllocator>
8588 Dispatch
const & d )
const
8592 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.
size() );
8597 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8598 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
8600 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
8601 uniqueSwapchains.reserve( createInfos.
size() );
8603 for (
auto const & swapchain : swapchains )
8607 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
// Unique-handle + caller allocator variant; SFINAE-constrained so the
// allocator's value_type matches UniqueHandle<SwapchainKHR, Dispatch>.
8610 template <
typename Dispatch,
8611 typename SwapchainKHRAllocator,
8613 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>
::value,
int>
::type>
8618 SwapchainKHRAllocator & swapchainKHRAllocator,
8619 Dispatch
const & d )
const
8623 std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.
size() );
8628 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8629 reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
8631 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
8632 uniqueSwapchains.reserve( createInfos.
size() );
8634 for (
auto const & swapchain : swapchains )
8638 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
// Single-swapchain unique variant (smart-handle mode only).
8641 template <
typename Dispatch>
8645 Dispatch
const & d )
const
8653 reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
8654 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8655 reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
8659 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// VK_KHR_xlib_surface wrappers — compiled only on Xlib platforms.
8665 #if defined( VK_USE_PLATFORM_XLIB_KHR )
// Raw wrapper around vkCreateXlibSurfaceKHR.
8668 template <
typename Dispatch>
8675 return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
8676 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
8677 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8678 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8681 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the created SurfaceKHR handle.
8682 template <
typename Dispatch>
8684 Instance::createXlibSurfaceKHR(
const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
8685 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8686 Dispatch
const & d )
const
8692 d.vkCreateXlibSurfaceKHR( m_instance,
8693 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
8694 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8695 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8701 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: surface ownership is transferred to a UniqueHandle
// whose deleter destroys it through this Instance.
8702 template <
typename Dispatch>
8704 Instance::createXlibSurfaceKHRUnique(
const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
8705 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8706 Dispatch
const & d )
const
8712 d.vkCreateXlibSurfaceKHR( m_instance,
8713 reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
8714 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8715 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8719 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8720 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// Query whether a queue family supports Xlib presentation (pointer form).
8725 template <
typename Dispatch>
8727 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
8730 return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
8733 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Reference-taking overload of the same query.
8734 template <
typename Dispatch>
8736 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
8740 VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
// VK_KHR_xcb_surface wrappers — compiled only on XCB platforms.
8747 #if defined( VK_USE_PLATFORM_XCB_KHR )
// Raw wrapper around vkCreateXcbSurfaceKHR.
8750 template <
typename Dispatch>
8757 return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
8758 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
8759 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8760 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8763 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the created SurfaceKHR handle.
8764 template <
typename Dispatch>
8766 Instance::createXcbSurfaceKHR(
const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
8767 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8768 Dispatch
const & d )
const
8774 d.vkCreateXcbSurfaceKHR( m_instance,
8775 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
8776 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8777 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8783 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: surface owned by a UniqueHandle tied to this Instance.
8784 template <
typename Dispatch>
8786 Instance::createXcbSurfaceKHRUnique(
const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
8787 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8788 Dispatch
const & d )
const
8794 d.vkCreateXcbSurfaceKHR( m_instance,
8795 reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
8796 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8797 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8801 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8802 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// Query whether a queue family supports XCB presentation (pointer form).
8807 template <
typename Dispatch>
8809 xcb_connection_t * connection,
8810 xcb_visualid_t visual_id,
8814 return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
8817 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Reference-taking overload of the same query.
8818 template <
typename Dispatch>
8820 xcb_connection_t & connection,
8821 xcb_visualid_t visual_id,
8826 VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
// VK_KHR_wayland_surface wrappers — compiled only on Wayland platforms.
8833 #if defined( VK_USE_PLATFORM_WAYLAND_KHR )
// Raw wrapper around vkCreateWaylandSurfaceKHR.
8836 template <
typename Dispatch>
8843 return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
8844 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
8845 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8846 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8849 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the created SurfaceKHR handle.
8850 template <
typename Dispatch>
8852 Instance::createWaylandSurfaceKHR(
const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
8853 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8854 Dispatch
const & d )
const
8859 VkResult result = d.vkCreateWaylandSurfaceKHR(
8861 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
8862 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8863 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8869 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: surface owned by a UniqueHandle tied to this Instance.
8870 template <
typename Dispatch>
8872 Instance::createWaylandSurfaceKHRUnique(
const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
8873 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8874 Dispatch
const & d )
const
8879 VkResult result = d.vkCreateWaylandSurfaceKHR(
8881 reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
8882 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8883 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8887 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8888 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// Query whether a queue family supports Wayland presentation (pointer form).
8893 template <
typename Dispatch>
8895 struct wl_display * display,
8899 return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
8902 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Reference-taking overload of the same query.
8903 template <
typename Dispatch>
8905 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
struct wl_display & display, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
8909 VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
// VK_KHR_android_surface wrappers — compiled only on Android.
8916 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
// Raw wrapper around vkCreateAndroidSurfaceKHR.
8919 template <
typename Dispatch>
8926 return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
8927 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
8928 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8929 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8932 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the created SurfaceKHR handle.
8933 template <
typename Dispatch>
8935 Instance::createAndroidSurfaceKHR(
const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
8936 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8937 Dispatch
const & d )
const
8942 VkResult result = d.vkCreateAndroidSurfaceKHR(
8944 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
8945 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8946 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8952 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: surface owned by a UniqueHandle tied to this Instance.
8953 template <
typename Dispatch>
8955 Instance::createAndroidSurfaceKHRUnique(
const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
8956 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8957 Dispatch
const & d )
const
8962 VkResult result = d.vkCreateAndroidSurfaceKHR(
8964 reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
8965 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
8966 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
8970 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
8971 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// VK_KHR_win32_surface wrappers — compiled only on Windows.
8977 #if defined( VK_USE_PLATFORM_WIN32_KHR )
// Raw wrapper around vkCreateWin32SurfaceKHR.
8980 template <
typename Dispatch>
8987 return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
8988 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
8989 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
8990 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
8993 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the created SurfaceKHR handle.
8994 template <
typename Dispatch>
8996 Instance::createWin32SurfaceKHR(
const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
8997 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
8998 Dispatch
const & d )
const
9004 d.vkCreateWin32SurfaceKHR( m_instance,
9005 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
9006 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9007 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
9013 # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: surface owned by a UniqueHandle tied to this Instance.
9014 template <
typename Dispatch>
9016 Instance::createWin32SurfaceKHRUnique(
const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
9017 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9018 Dispatch
const & d )
const
9024 d.vkCreateWin32SurfaceKHR( m_instance,
9025 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
9026 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9027 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
9031 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
9032 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// Win32 presentation-support query takes no window-system object — only the
// queue family index.
9037 template <
typename Dispatch>
9041 return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
// NOTE(review): garbled extraction — signatures/braces are missing where the embedded
// line numbers skip; restore from the original vulkan_funcs.hpp.

// VK_EXT_debug_report wrappers.

// Fragment: Instance::createDebugReportCallbackEXT — raw-pointer overload over
// vkCreateDebugReportCallbackEXT.
9047 template <
typename Dispatch>
9055 return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
9056 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
9057 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9058 reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );

// Fragment: Instance::createDebugReportCallbackEXT — enhanced-mode overload (reference
// createInfo, Optional allocator); writes the new handle into a local `callback`
// declared in a missing line.
9061 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9062 template <
typename Dispatch>
9066 Dispatch
const & d )
const
9071 VkResult result = d.vkCreateDebugReportCallbackEXT(
9073 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
9074 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9075 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );

// Fragment: Instance::createDebugReportCallbackEXTUnique — same call; the (missing)
// tail wraps `callback` in a UniqueHandle.
9081 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9082 template <
typename Dispatch>
9086 Dispatch
const & d )
const
9091 VkResult result = d.vkCreateDebugReportCallbackEXT(
9093 reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
9094 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9095 reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
9099 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),

// Fragments: Instance::destroyDebugReportCallbackEXT (pointer and Optional-allocator
// overloads), followed by the equivalent Instance::destroy( DebugReportCallbackEXT )
// overload pair — all four forward to vkDestroyDebugReportCallbackEXT.
9105 template <
typename Dispatch>
9111 d.vkDestroyDebugReportCallbackEXT(
9112 m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9116 template <
typename Dispatch>
9123 d.vkDestroyDebugReportCallbackEXT(
9125 static_cast<VkDebugReportCallbackEXT>( callback ),
9126 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
9130 template <
typename Dispatch>
9136 d.vkDestroyDebugReportCallbackEXT(
9137 m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9140 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9141 template <
typename Dispatch>
9148 d.vkDestroyDebugReportCallbackEXT(
9150 static_cast<VkDebugReportCallbackEXT>( callback ),
9151 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );

// Fragment: Instance::debugReportMessageEXT — raw-pointer overload; casts flags and
// objectType to the C enums and forwards message strings untouched.
9155 template <
typename Dispatch>
9160 int32_t messageCode,
9161 const char * pLayerPrefix,
9162 const char * pMessage,
9166 d.vkDebugReportMessageEXT( m_instance,
9167 static_cast<VkDebugReportFlagsEXT>(
flags ),
9168 static_cast<VkDebugReportObjectTypeEXT>( objectType ),

// Fragment: Instance::debugReportMessageEXT — enhanced-mode overload; takes std::string
// (visible via layerPrefix.c_str()) instead of const char*.
9176 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9177 template <
typename Dispatch>
9182 int32_t messageCode,
9189 d.vkDebugReportMessageEXT( m_instance,
9190 static_cast<VkDebugReportFlagsEXT>(
flags ),
9191 static_cast<VkDebugReportObjectTypeEXT>( objectType ),
9195 layerPrefix.c_str(),
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// VK_EXT_debug_marker wrappers. Each pair is a raw-pointer overload followed by an
// enhanced-mode reference overload of the same entry point.

// Fragments: Device::debugMarkerSetObjectTagEXT -> vkDebugMarkerSetObjectTagEXT.
9202 template <
typename Dispatch>
9207 return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
9210 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9211 template <
typename Dispatch>
9217 VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );

// Fragments: Device::debugMarkerSetObjectNameEXT -> vkDebugMarkerSetObjectNameEXT.
9224 template <
typename Dispatch>
9229 return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
9232 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9233 template <
typename Dispatch>
9239 VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );

// Fragments: CommandBuffer::debugMarkerBeginEXT -> vkCmdDebugMarkerBeginEXT.
9246 template <
typename Dispatch>
9251 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
9254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9255 template <
typename Dispatch>
9261 d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );

// Fragment: CommandBuffer::debugMarkerEndEXT — no parameters beyond the dispatcher.
9265 template <
typename Dispatch>
9269 d.vkCmdDebugMarkerEndEXT( m_commandBuffer );

// Fragments: CommandBuffer::debugMarkerInsertEXT -> vkCmdDebugMarkerInsertEXT.
9272 template <
typename Dispatch>
9277 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
9280 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9281 template <
typename Dispatch>
9287 d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// VK_KHR_video_queue wrappers (beta extension, guarded by VK_ENABLE_BETA_EXTENSIONS).
9291 #if defined( VK_ENABLE_BETA_EXTENSIONS )

// Fragment: PhysicalDevice::getVideoCapabilitiesKHR — raw-pointer overload over
// vkGetPhysicalDeviceVideoCapabilitiesKHR.
9294 template <
typename Dispatch>
9296 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
9300 return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
9301 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );

// Fragment: enhanced-mode overload — returns a VideoCapabilitiesKHR by value.
9304 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9305 template <
typename Dispatch>
9307 PhysicalDevice::getVideoCapabilitiesKHR(
const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch
const & d )
const
9311 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
9312 VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
9313 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );

// Fragment: StructureChain<X, Y, Z...> overload — fills the VideoCapabilitiesKHR link
// of a caller-specified structure chain so pNext-extension structs are queried too.
9319 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
9321 PhysicalDevice::getVideoCapabilitiesKHR(
const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch
const & d )
const
9325 StructureChain<
X,
Y,
Z...> structureChain;
9326 VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
9327 VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
9328 m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
9331 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragment: PhysicalDevice::getVideoFormatPropertiesKHR — raw-pointer two-call-style
// overload over vkGetPhysicalDeviceVideoFormatPropertiesKHR.
9335 template <
typename Dispatch>
9337 PhysicalDevice::getVideoFormatPropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
9338 uint32_t * pVideoFormatPropertyCount,
9339 VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
9343 return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
9344 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
9345 pVideoFormatPropertyCount,
9346 reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );

// Fragment: enhanced-mode vector overload — standard Vulkan enumerate loop: query the
// count, resize, fetch; the surrounding (missing) loop retries on VK_INCOMPLETE.
9349 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9350 template <
typename V
ideoFormatPropertiesKHRAllocator,
typename Dispatch>
9352 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>
::type
9353 PhysicalDevice::getVideoFormatPropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch
const & d )
const
9357 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
9358 uint32_t videoFormatPropertyCount;
9362 result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
9363 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount,
nullptr );
9364 if ( ( result ==
VK_SUCCESS ) && videoFormatPropertyCount )
9366 videoFormatProperties.resize( videoFormatPropertyCount );
9367 result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
9368 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
9369 &videoFormatPropertyCount,
9370 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
// Shrink if the driver reported fewer entries on the second call than were reserved.
9375 if ( videoFormatPropertyCount < videoFormatProperties.size() )
9377 videoFormatProperties.resize( videoFormatPropertyCount );
9379 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );

// Fragment: same enumerate loop, but the caller supplies the vector's allocator
// instance (videoFormatPropertiesKHRAllocator).
9382 template <
typename VideoFormatPropertiesKHRAllocator,
9387 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>
::type
9388 PhysicalDevice::getVideoFormatPropertiesKHR(
const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
9389 VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
9390 Dispatch
const & d )
const
9394 std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
9395 uint32_t videoFormatPropertyCount;
9399 result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
9400 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount,
nullptr );
9401 if ( ( result ==
VK_SUCCESS ) && videoFormatPropertyCount )
9403 videoFormatProperties.resize( videoFormatPropertyCount );
9404 result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
9405 reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
9406 &videoFormatPropertyCount,
9407 reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
9412 if ( videoFormatPropertyCount < videoFormatProperties.size() )
9414 videoFormatProperties.resize( videoFormatPropertyCount );
9416 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragment: Device::createVideoSessionKHR — raw-pointer overload over
// vkCreateVideoSessionKHR.
9420 template <
typename Dispatch>
9423 VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
9427 return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
9428 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
9429 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9430 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );

// Fragment: enhanced-mode overload — returns the created VideoSessionKHR by value.
9433 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9434 template <
typename Dispatch>
9436 Device::createVideoSessionKHR(
const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
9437 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9438 Dispatch
const & d )
const
9442 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
9444 d.vkCreateVideoSessionKHR( m_device,
9445 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
9446 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9447 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );

// Fragment: unique-handle overload — wraps the session in a UniqueHandle with an
// ObjectDestroy<Device> deleter.
9453 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9454 template <
typename Dispatch>
9456 Device::createVideoSessionKHRUnique(
const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
9457 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9458 Dispatch
const & d )
const
9462 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
9464 d.vkCreateVideoSessionKHR( m_device,
9465 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
9466 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9467 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
9471 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
9472 UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *
this, allocator, d ) ) );

// Fragments: Device::destroyVideoSessionKHR (pointer and Optional-allocator overloads)
// and the equivalent Device::destroy( VideoSessionKHR ) overload pair — all forward to
// vkDestroyVideoSessionKHR.
9477 template <
typename Dispatch>
9478 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
9483 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9486 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9487 template <
typename Dispatch>
9488 VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
9489 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9494 d.vkDestroyVideoSessionKHR(
9496 static_cast<VkVideoSessionKHR>( videoSession ),
9497 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
9501 template <
typename Dispatch>
9507 d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9510 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9511 template <
typename Dispatch>
9513 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9518 d.vkDestroyVideoSessionKHR(
9520 static_cast<VkVideoSessionKHR>( videoSession ),
9521 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragment: Device::getVideoSessionMemoryRequirementsKHR — raw-pointer
// two-call-style overload over vkGetVideoSessionMemoryRequirementsKHR.
9525 template <
typename Dispatch>
9527 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
9528 uint32_t * pMemoryRequirementsCount,
9529 VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
9533 return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
9534 static_cast<VkVideoSessionKHR>( videoSession ),
9535 pMemoryRequirementsCount,
9536 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );

// Fragment: enhanced-mode vector overload — count / resize / fetch enumerate pattern.
9539 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9540 template <
typename V
ideoSessionMemoryRequirementsKHRAllocator,
typename Dispatch>
9542 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>
::type
9543 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch
const & d )
const
9547 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
9548 uint32_t memoryRequirementsCount;
9552 result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount,
nullptr );
9553 if ( ( result ==
VK_SUCCESS ) && memoryRequirementsCount )
9555 memoryRequirements.resize( memoryRequirementsCount );
9556 result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
9557 static_cast<VkVideoSessionKHR>( videoSession ),
9558 &memoryRequirementsCount,
9559 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
// Shrink if the second call reported fewer entries than were reserved.
9564 if ( memoryRequirementsCount < memoryRequirements.size() )
9566 memoryRequirements.resize( memoryRequirementsCount );
9568 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );

// Fragment: same enumerate loop with a caller-supplied vector allocator instance.
9571 template <
typename VideoSessionMemoryRequirementsKHRAllocator,
9576 typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>
::type
9577 Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
9578 VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
9579 Dispatch
const & d )
const
9583 std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements(
9584 videoSessionMemoryRequirementsKHRAllocator );
9585 uint32_t memoryRequirementsCount;
9589 result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount,
nullptr );
9590 if ( ( result ==
VK_SUCCESS ) && memoryRequirementsCount )
9592 memoryRequirements.resize( memoryRequirementsCount );
9593 result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
9594 static_cast<VkVideoSessionKHR>( videoSession ),
9595 &memoryRequirementsCount,
9596 reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
9601 if ( memoryRequirementsCount < memoryRequirements.size() )
9603 memoryRequirements.resize( memoryRequirementsCount );
9605 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragment: Device::bindVideoSessionMemoryKHR — raw-pointer overload; binds an array of
// BindVideoSessionMemoryInfoKHR to a video session via vkBindVideoSessionMemoryKHR.
9609 template <
typename Dispatch>
9611 Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
9612 uint32_t bindSessionMemoryInfoCount,
9613 const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
9617 return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
9618 static_cast<VkVideoSessionKHR>( videoSession ),
9619 bindSessionMemoryInfoCount,
9620 reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );

// Fragment: enhanced-mode overload — takes an ArrayProxy-style range (count derived
// from bindSessionMemoryInfos.size()).
9623 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9624 template <
typename Dispatch>
9626 VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
9628 Dispatch
const & d )
const
9632 VkResult result = d.vkBindVideoSessionMemoryKHR( m_device,
9633 static_cast<VkVideoSessionKHR>( videoSession ),
9634 bindSessionMemoryInfos.
size(),
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragment: Device::createVideoSessionParametersKHR — raw-pointer overload over
// vkCreateVideoSessionParametersKHR.
9642 template <
typename Dispatch>
9644 Device::createVideoSessionParametersKHR(
const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
9646 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
9650 return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
9651 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
9652 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
9653 reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );

// Fragment: enhanced-mode overload — returns the VideoSessionParametersKHR by value.
9656 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9657 template <
typename Dispatch>
9659 Device::createVideoSessionParametersKHR(
const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
9660 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9661 Dispatch
const & d )
const
9665 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
9666 VkResult result = d.vkCreateVideoSessionParametersKHR(
9668 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
9669 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9670 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
9673 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters );

// Fragment: unique-handle overload — wraps the handle with an ObjectDestroy<Device>
// deleter.
9676 # ifndef VULKAN_HPP_NO_SMART_HANDLE
9677 template <
typename Dispatch>
9679 Device::createVideoSessionParametersKHRUnique(
const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
9680 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9681 Dispatch
const & d )
const
9685 VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
9686 VkResult result = d.vkCreateVideoSessionParametersKHR(
9688 reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
9689 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
9690 reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
9694 UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
9695 videoSessionParameters, ObjectDestroy<Device, Dispatch>( *
this, allocator, d ) ) );

// Fragments: Device::updateVideoSessionParametersKHR — pointer and reference overloads
// over vkUpdateVideoSessionParametersKHR.
9700 template <
typename Dispatch>
9702 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
9703 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
9707 return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
9708 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
9709 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
9712 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9713 template <
typename Dispatch>
9715 Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
9716 const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
9717 Dispatch
const & d )
const
9721 VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device,
9722 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
9723 reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );

// Fragments: Device::destroyVideoSessionParametersKHR (pointer and Optional-allocator
// overloads) and the Device::destroy( VideoSessionParametersKHR ) pair — all forward to
// vkDestroyVideoSessionParametersKHR.
9730 template <
typename Dispatch>
9731 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
9736 d.vkDestroyVideoSessionParametersKHR(
9737 m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9740 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9741 template <
typename Dispatch>
9742 VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
9743 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9748 d.vkDestroyVideoSessionParametersKHR(
9750 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
9751 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
9755 template <
typename Dispatch>
9761 d.vkDestroyVideoSessionParametersKHR(
9762 m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
9765 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9766 template <
typename Dispatch>
9768 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
9773 d.vkDestroyVideoSessionParametersKHR(
9775 static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
9776 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Command-buffer video-coding wrappers. Each pair is a raw-pointer overload followed
// by an enhanced-mode reference overload of the same command.

// Fragments: CommandBuffer::beginVideoCodingKHR -> vkCmdBeginVideoCodingKHR.
9780 template <
typename Dispatch>
9781 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR(
const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
9785 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
9788 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9789 template <
typename Dispatch>
9790 VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR(
const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
9795 d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );

// Fragments: CommandBuffer::endVideoCodingKHR -> vkCmdEndVideoCodingKHR.
9799 template <
typename Dispatch>
9800 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR(
const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
9804 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
9807 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9808 template <
typename Dispatch>
9809 VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR(
const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
9814 d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );

// Fragments: CommandBuffer::controlVideoCodingKHR -> vkCmdControlVideoCodingKHR.
9818 template <
typename Dispatch>
9819 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR(
const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
9823 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
9826 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9827 template <
typename Dispatch>
9828 VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR(
const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
9833 d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );

// VK_KHR_video_decode_queue (beta): CommandBuffer::decodeVideoKHR pair over
// vkCmdDecodeVideoKHR.
9838 #if defined( VK_ENABLE_BETA_EXTENSIONS )
9841 template <
typename Dispatch>
9842 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR(
const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
9846 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
9849 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9850 template <
typename Dispatch>
9851 VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR(
const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
9856 d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// VK_EXT_transform_feedback wrappers.

// Fragment: CommandBuffer::bindTransformFeedbackBuffersEXT — raw-pointer overload over
// vkCmdBindTransformFeedbackBuffersEXT (parallel buffer/offset/size arrays).
9863 template <
typename Dispatch>
9865 uint32_t bindingCount,
9872 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
9875 reinterpret_cast<const VkBuffer *>( pBuffers ),
9876 reinterpret_cast<const VkDeviceSize *>( pOffsets ),
9877 reinterpret_cast<const VkDeviceSize *>( pSizes ) );

// Fragment: enhanced-mode ArrayProxy overload; under VULKAN_HPP_NO_EXCEPTIONS the
// (missing) precondition checks become asserts instead of throws.
9880 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9881 template <
typename Dispatch>
9890 # ifdef VULKAN_HPP_NO_EXCEPTIONS
9904 d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
9907 reinterpret_cast<const VkBuffer *
>(
buffers.data() ),
9908 reinterpret_cast<const VkDeviceSize *>(
offsets.data() ),
9909 reinterpret_cast<const VkDeviceSize *>(
sizes.data() ) );

// Fragment: CommandBuffer::beginTransformFeedbackEXT — raw-pointer overload over
// vkCmdBeginTransformFeedbackEXT.
9913 template <
typename Dispatch>
9915 uint32_t counterBufferCount,
9921 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
9924 reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
9925 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );

// Fragment: enhanced-mode overload — validates that counterBufferOffsets is either
// empty or the same length as counterBuffers (assert under NO_EXCEPTIONS, throw
// otherwise — the throw line is among the missing ones).
9928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9929 template <
typename Dispatch>
9937 # ifdef VULKAN_HPP_NO_EXCEPTIONS
9938 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
9940 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
9946 d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
9948 counterBuffers.size(),
9949 reinterpret_cast<const VkBuffer *
>( counterBuffers.data() ),
9950 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );

// Fragments: CommandBuffer::endTransformFeedbackEXT — same pointer/ArrayProxy pair
// over vkCmdEndTransformFeedbackEXT with the same size-match validation.
9954 template <
typename Dispatch>
9956 uint32_t counterBufferCount,
9962 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
9965 reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
9966 reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
9969 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
9970 template <
typename Dispatch>
9978 # ifdef VULKAN_HPP_NO_EXCEPTIONS
9979 VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
9981 if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
9987 d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
9989 counterBuffers.size(),
9990 reinterpret_cast<const VkBuffer *
>( counterBuffers.data() ),
9991 reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragment: CommandBuffer::beginQueryIndexedEXT -> vkCmdBeginQueryIndexedEXT
// (transform-feedback indexed query begin).
9995 template <
typename Dispatch>
10003 d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ),
query, static_cast<VkQueryControlFlags>(
flags ),
index );

// Fragment: CommandBuffer::endQueryIndexedEXT -> vkCmdEndQueryIndexedEXT.
10006 template <
typename Dispatch>
10011 d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ),
query,
index );

// Fragment: CommandBuffer::drawIndirectByteCountEXT -> vkCmdDrawIndirectByteCountEXT —
// draw whose vertex count is read from a counter buffer at counterBufferOffset.
10014 template <
typename Dispatch>
10016 uint32_t firstInstance,
10019 uint32_t counterOffset,
10020 uint32_t vertexStride,
10024 d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
10027 static_cast<VkBuffer>( counterBuffer ),
10028 static_cast<VkDeviceSize>( counterBufferOffset ),
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// VK_NVX_binary_import wrappers: CUDA module / function creation.

// Fragment: Device::createCuModuleNVX — raw-pointer overload over vkCreateCuModuleNVX.
10035 template <
typename Dispatch>
10042 return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
10043 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
10044 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10045 reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );

// Fragment: enhanced-mode overload — local handle is named `module` (a contextual
// keyword; legal as an identifier here).
10048 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10049 template <
typename Dispatch>
10053 Dispatch
const & d )
const
10059 d.vkCreateCuModuleNVX( m_device,
10060 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
10061 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10062 reinterpret_cast<VkCuModuleNVX *>( &module ) );

// Fragment: unique-handle overload of createCuModuleNVX.
10068 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10069 template <
typename Dispatch>
10073 Dispatch
const & d )
const
10079 d.vkCreateCuModuleNVX( m_device,
10080 reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
10081 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10082 reinterpret_cast<VkCuModuleNVX *>( &module ) );

// Fragment: Device::createCuFunctionNVX — raw-pointer overload over
// vkCreateCuFunctionNVX.
10091 template <
typename Dispatch>
10098 return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
10099 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
10100 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10101 reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );

// Fragment: enhanced-mode overload — local handle is named `function`.
10104 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10105 template <
typename Dispatch>
10109 Dispatch
const & d )
const
10115 d.vkCreateCuFunctionNVX( m_device,
10116 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
10117 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10118 reinterpret_cast<VkCuFunctionNVX *>( &
function ) );

// Fragment: unique-handle overload of createCuFunctionNVX.
10124 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10125 template <
typename Dispatch>
10129 Dispatch
const & d )
const
10135 d.vkCreateCuFunctionNVX( m_device,
10136 reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
10137 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10138 reinterpret_cast<VkCuFunctionNVX *>( &
function ) );
10142 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// NOTE(review): garbled extraction — interior lines missing; verify against the
// original vulkan_funcs.hpp.

// Fragments: Device::destroyCuModuleNVX (pointer and Optional-allocator overloads) and
// the Device::destroy( CuModuleNVX ) pair — all forward to vkDestroyCuModuleNVX.
10148 template <
typename Dispatch>
10154 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10157 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10158 template <
typename Dispatch>
10165 d.vkDestroyCuModuleNVX( m_device,
10166 static_cast<VkCuModuleNVX>( module ),
10167 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10171 template <
typename Dispatch>
10177 d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10180 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10181 template <
typename Dispatch>
10188 d.vkDestroyCuModuleNVX( m_device,
10189 static_cast<VkCuModuleNVX>( module ),
10190 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );

// Fragments: Device::destroyCuFunctionNVX overloads and the
// Device::destroy( CuFunctionNVX ) pair — all forward to vkDestroyCuFunctionNVX; the
// handle parameter is named `function`.
10194 template <
typename Dispatch>
10200 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>(
function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10203 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10204 template <
typename Dispatch>
10211 d.vkDestroyCuFunctionNVX( m_device,
10212 static_cast<VkCuFunctionNVX>(
function ),
10213 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
10217 template <
typename Dispatch>
10223 d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>(
function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
10226 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10227 template <
typename Dispatch>
10234 d.vkDestroyCuFunctionNVX( m_device,
10235 static_cast<VkCuFunctionNVX>(
function ),
10236 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );

// Fragments: CommandBuffer::cuLaunchKernelNVX — pointer and reference overloads over
// vkCmdCuLaunchKernelNVX.
10240 template <
typename Dispatch>
10245 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
10248 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10249 template <
typename Dispatch>
10255 d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
10261 template <
typename Dispatch>
10266 return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
10269 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10270 template <
typename Dispatch>
10276 uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
10282 template <
typename Dispatch>
10288 return static_cast<Result>(
10289 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
10292 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10293 template <
typename Dispatch>
10301 d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
10310 template <
typename Dispatch>
10315 uint32_t maxDrawCount,
10320 d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
10321 static_cast<VkBuffer>(
buffer ),
10322 static_cast<VkDeviceSize>(
offset ),
10323 static_cast<VkBuffer>( countBuffer ),
10324 static_cast<VkDeviceSize>( countBufferOffset ),
10329 template <
typename Dispatch>
10334 uint32_t maxDrawCount,
10339 d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
10340 static_cast<VkBuffer>(
buffer ),
10341 static_cast<VkDeviceSize>(
offset ),
10342 static_cast<VkBuffer>( countBuffer ),
10343 static_cast<VkDeviceSize>( countBufferOffset ),
10350 template <
typename Dispatch>
10354 size_t * pInfoSize,
10359 return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
10360 static_cast<VkPipeline>( pipeline ),
10361 static_cast<VkShaderStageFlagBits>( shaderStage ),
10362 static_cast<VkShaderInfoTypeAMD>( infoType ),
10367 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10368 template <
typename U
int8_tAllocator,
typename Dispatch>
10373 Dispatch
const & d )
const
10377 std::vector<uint8_t, Uint8_tAllocator> info;
10382 result = d.vkGetShaderInfoAMD( m_device,
10383 static_cast<VkPipeline>( pipeline ),
10384 static_cast<VkShaderStageFlagBits>( shaderStage ),
10385 static_cast<VkShaderInfoTypeAMD>( infoType ),
10390 info.resize( infoSize );
10391 result = d.vkGetShaderInfoAMD( m_device,
10392 static_cast<VkPipeline>( pipeline ),
10393 static_cast<VkShaderStageFlagBits>( shaderStage ),
10394 static_cast<VkShaderInfoTypeAMD>( infoType ),
10396 reinterpret_cast<void *>( info.data() ) );
10401 if ( infoSize < info.size() )
10403 info.resize( infoSize );
10408 template <
typename Uint8_tAllocator,
10416 Uint8_tAllocator & uint8_tAllocator,
10417 Dispatch
const & d )
const
10421 std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
10426 result = d.vkGetShaderInfoAMD( m_device,
10427 static_cast<VkPipeline>( pipeline ),
10428 static_cast<VkShaderStageFlagBits>( shaderStage ),
10429 static_cast<VkShaderInfoTypeAMD>( infoType ),
10434 info.resize( infoSize );
10435 result = d.vkGetShaderInfoAMD( m_device,
10436 static_cast<VkPipeline>( pipeline ),
10437 static_cast<VkShaderStageFlagBits>( shaderStage ),
10438 static_cast<VkShaderInfoTypeAMD>( infoType ),
10440 reinterpret_cast<void *>( info.data() ) );
10445 if ( infoSize < info.size() )
10447 info.resize( infoSize );
10455 template <
typename Dispatch>
10460 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
10463 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10464 template <
typename Dispatch>
10470 d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
10474 template <
typename Dispatch>
10478 d.vkCmdEndRenderingKHR( m_commandBuffer );
10481 #if defined( VK_USE_PLATFORM_GGP )
10484 template <
typename Dispatch>
10486 Instance::createStreamDescriptorSurfaceGGP(
const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
10492 return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
10493 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
10494 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
10495 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
10498 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10499 template <
typename Dispatch>
10501 Instance::createStreamDescriptorSurfaceGGP(
const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
10502 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
10503 Dispatch
const & d )
const
10508 VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
10510 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
10511 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10512 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
10518 # ifndef VULKAN_HPP_NO_SMART_HANDLE
10519 template <
typename Dispatch>
10521 Instance::createStreamDescriptorSurfaceGGPUnique(
const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
10522 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
10523 Dispatch
const & d )
const
10528 VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
10530 reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
10531 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
10532 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
10536 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
10537 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
10545 template <
typename Dispatch>
10557 return static_cast<Result>(
10558 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
10559 static_cast<VkFormat>(
format ),
10560 static_cast<VkImageType>(
type ),
10561 static_cast<VkImageTiling>( tiling ),
10562 static_cast<VkImageUsageFlags>(
usage ),
10563 static_cast<VkImageCreateFlags>(
flags ),
10564 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
10565 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
10568 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10569 template <
typename Dispatch>
10577 Dispatch
const & d )
const
10583 d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
10584 static_cast<VkFormat>( format ),
10585 static_cast<VkImageType>( type ),
10586 static_cast<VkImageTiling>( tiling ),
10587 static_cast<VkImageUsageFlags>( usage ),
10588 static_cast<VkImageCreateFlags>( flags ),
10589 static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
10590 reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
10593 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
10597 #if defined( VK_USE_PLATFORM_WIN32_KHR )
10600 template <
typename Dispatch>
10607 return static_cast<Result>(
10608 d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>(
memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>(
handleType ), pHandle ) );
10611 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10612 template <
typename Dispatch>
10620 d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
10630 template <
typename Dispatch>
10635 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
10638 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10639 template <
typename Dispatch>
10646 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
10651 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
10658 d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
10660 return structureChain;
10664 template <
typename Dispatch>
10669 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
10672 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10673 template <
typename Dispatch>
10680 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
10685 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
10692 d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
10694 return structureChain;
10698 template <
typename Dispatch>
10704 d.vkGetPhysicalDeviceFormatProperties2KHR(
10705 m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
10708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10709 template <
typename Dispatch>
10716 d.vkGetPhysicalDeviceFormatProperties2KHR(
10717 m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
10719 return formatProperties;
10722 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
10730 d.vkGetPhysicalDeviceFormatProperties2KHR(
10731 m_physicalDevice, static_cast<VkFormat>(
format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
10733 return structureChain;
10737 template <
typename Dispatch>
10744 return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
10745 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
10746 reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
10749 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10750 template <
typename Dispatch>
10757 VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
10758 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
10759 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
10762 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
10765 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
10773 VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
10774 reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
10775 reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
10778 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
10782 template <
typename Dispatch>
10788 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10789 m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
10792 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10793 template <
typename QueueFamilyProperties2Allocator,
typename Dispatch>
10799 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
10800 uint32_t queueFamilyPropertyCount;
10801 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
10802 queueFamilyProperties.resize( queueFamilyPropertyCount );
10803 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10804 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
10807 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
10809 queueFamilyProperties.resize( queueFamilyPropertyCount );
10811 return queueFamilyProperties;
10814 template <
typename QueueFamilyProperties2Allocator,
10823 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
10824 uint32_t queueFamilyPropertyCount;
10825 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
10826 queueFamilyProperties.resize( queueFamilyPropertyCount );
10827 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10828 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
10831 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
10833 queueFamilyProperties.resize( queueFamilyPropertyCount );
10835 return queueFamilyProperties;
10838 template <
typename StructureChain,
typename StructureChainAllocator,
typename Dispatch>
10844 std::vector<StructureChain, StructureChainAllocator> structureChains;
10845 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
10846 uint32_t queueFamilyPropertyCount;
10847 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
10848 structureChains.resize( queueFamilyPropertyCount );
10849 queueFamilyProperties.resize( queueFamilyPropertyCount );
10850 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
10852 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
10854 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10855 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
10858 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
10860 structureChains.resize( queueFamilyPropertyCount );
10862 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
10864 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
10866 return structureChains;
10870 typename StructureChainAllocator,
10879 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
10880 std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
10881 uint32_t queueFamilyPropertyCount;
10882 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount,
nullptr );
10883 structureChains.resize( queueFamilyPropertyCount );
10884 queueFamilyProperties.resize( queueFamilyPropertyCount );
10885 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
10887 queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
10889 d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
10890 m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
10893 if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
10895 structureChains.resize( queueFamilyPropertyCount );
10897 for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
10899 structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
10901 return structureChains;
10905 template <
typename Dispatch>
10910 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
10913 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10914 template <
typename Dispatch>
10921 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
10923 return memoryProperties;
10926 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
10933 structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
10934 d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
10936 return structureChain;
10940 template <
typename Dispatch>
10942 uint32_t * pPropertyCount,
10947 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
10948 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
10950 reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
10953 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
10954 template <
typename SparseImageFormatProperties2Allocator,
typename Dispatch>
10957 Dispatch
const & d )
const
10961 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
10962 uint32_t propertyCount;
10963 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
10964 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount,
nullptr );
10965 properties.resize( propertyCount );
10966 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
10967 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
10969 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
10972 if ( propertyCount < properties.size() )
10974 properties.resize( propertyCount );
10979 template <
typename SparseImageFormatProperties2Allocator,
10985 SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
10986 Dispatch
const & d )
const
10990 std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
10991 uint32_t propertyCount;
10992 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
10993 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount,
nullptr );
10994 properties.resize( propertyCount );
10995 d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
10996 reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
10998 reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
11001 if ( propertyCount < properties.size() )
11003 properties.resize( propertyCount );
11011 template <
typename Dispatch>
11013 uint32_t localDeviceIndex,
11014 uint32_t remoteDeviceIndex,
11019 d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
11020 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
11023 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11024 template <
typename Dispatch>
11026 uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
11031 d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
11032 m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
11034 return peerMemoryFeatures;
11038 template <
typename Dispatch>
11042 d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
11045 template <
typename Dispatch>
11047 uint32_t baseGroupY,
11048 uint32_t baseGroupZ,
11049 uint32_t groupCountX,
11050 uint32_t groupCountY,
11051 uint32_t groupCountZ,
11055 d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
11058 #if defined( VK_USE_PLATFORM_VI_NN )
11061 template <
typename Dispatch>
11068 return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
11069 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
11070 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11071 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
11074 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11075 template <
typename Dispatch>
11077 Instance::createViSurfaceNN(
const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
11078 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
11079 Dispatch
const & d )
const
11085 d.vkCreateViSurfaceNN( m_instance,
11086 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
11087 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11088 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
11094 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11095 template <
typename Dispatch>
11097 Instance::createViSurfaceNNUnique(
const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
11098 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
11099 Dispatch
const & d )
const
11105 d.vkCreateViSurfaceNN( m_instance,
11106 reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
11107 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11108 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
11112 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
11113 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
11121 template <
typename Dispatch>
11127 d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>(
flags ) );
11132 template <
typename Dispatch>
11139 return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
11140 m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
11143 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11144 template <
typename PhysicalDeviceGroupPropertiesAllocator,
typename Dispatch>
11151 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
11152 uint32_t physicalDeviceGroupCount;
11156 result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount,
nullptr );
11157 if ( ( result ==
VK_SUCCESS ) && physicalDeviceGroupCount )
11159 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
11160 result = d.vkEnumeratePhysicalDeviceGroupsKHR(
11161 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
11165 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
11166 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
11168 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
11170 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
11173 template <
typename PhysicalDeviceGroupPropertiesAllocator,
11183 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
11184 physicalDeviceGroupPropertiesAllocator );
11185 uint32_t physicalDeviceGroupCount;
11189 result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount,
nullptr );
11190 if ( ( result ==
VK_SUCCESS ) && physicalDeviceGroupCount )
11192 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
11193 result = d.vkEnumeratePhysicalDeviceGroupsKHR(
11194 m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
11198 VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
11199 if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
11201 physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
11203 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
11209 template <
typename Dispatch>
11215 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
11216 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
11217 reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
11220 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11221 template <
typename Dispatch>
11229 d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
11230 reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
11231 reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
11233 return externalBufferProperties;
11237 #if defined( VK_USE_PLATFORM_WIN32_KHR )
11240 template <
typename Dispatch>
11246 return static_cast<Result>(
11247 d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
11250 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11251 template <
typename Dispatch>
11253 Device::getMemoryWin32HandleKHR(
const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch
const & d )
const
11258 VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
11265 template <
typename Dispatch>
11269 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
11273 return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
11274 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
11276 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
11279 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11280 template <
typename Dispatch>
11286 VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
11287 VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
11288 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
11290 reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
11293 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
11300 template <
typename Dispatch>
11306 return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
11309 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11310 template <
typename Dispatch>
11312 Dispatch
const & d )
const
11317 VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
11324 template <
typename Dispatch>
11331 return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
11332 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
11335 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11336 template <
typename Dispatch>
11343 VkResult result = d.vkGetMemoryFdPropertiesKHR(
11344 m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
11347 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
11353 template <
typename Dispatch>
11360 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
11361 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
11362 reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
11365 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11366 template <
typename Dispatch>
11374 d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
11375 reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
11376 reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
11378 return externalSemaphoreProperties;
11382 #if defined( VK_USE_PLATFORM_WIN32_KHR )
11385 template <
typename Dispatch>
11387 const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
11390 return static_cast<Result>(
11391 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
11394 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11395 template <
typename Dispatch>
11397 Device::importSemaphoreWin32HandleKHR(
const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
11398 Dispatch
const & d )
const
11403 d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
11410 template <
typename Dispatch>
11412 const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
11415 return static_cast<Result>(
11416 d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
11419 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11420 template <
typename Dispatch>
11422 Device::getSemaphoreWin32HandleKHR(
const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch
const & d )
const
11427 VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
11437 template <
typename Dispatch>
11442 return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
11445 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11446 template <
typename Dispatch>
11452 VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
11459 template <
typename Dispatch>
11465 return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
11468 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11469 template <
typename Dispatch>
11476 VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
11485 template <
typename Dispatch>
11489 uint32_t descriptorWriteCount,
11494 d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
11495 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
11496 static_cast<VkPipelineLayout>( layout ),
11498 descriptorWriteCount,
11499 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
11502 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11503 template <
typename Dispatch>
11513 d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
11514 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
11515 static_cast<VkPipelineLayout>( layout ),
11517 descriptorWrites.size(),
11522 template <
typename Dispatch>
11526 const void * pData,
11530 d.vkCmdPushDescriptorSetWithTemplateKHR(
11531 m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
11534 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11535 template <
typename DataType,
typename Dispatch>
11544 d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
11545 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
11546 static_cast<VkPipelineLayout>( layout ),
11548 reinterpret_cast<const void *>( &
data ) );
11554 template <
typename Dispatch>
11559 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
11562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11563 template <
typename Dispatch>
11569 d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
11573 template <
typename Dispatch>
11577 d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
// NOTE(review): lossy extraction of generated Vulkan-Hpp wrappers for
// VK_KHR_descriptor_update_template; interior lines are missing (see the
// gaps in the embedded line numbers). Fragments only — not compilable.
//
// Fragment: pointer-style Device::createDescriptorUpdateTemplateKHR —
// returns the raw Result, output handle written through the out-pointer.
11582 template <
typename Dispatch>
11590 return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
11591 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
11592 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11593 reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
// Fragment: enhanced-mode overload — Optional<AllocationCallbacks> is
// unwrapped via static_cast before the reinterpret_cast to the C type;
// result is wrapped by createResultValueType.
11596 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11597 template <
typename Dispatch>
11601 Dispatch
const & d )
const
11606 VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
11608 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
11609 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11610 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
11613 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
// Fragment: ...Unique smart-handle variant (same C call; the UniqueHandle
// construction lines are missing from this extraction).
11616 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11617 template <
typename Dispatch>
11621 Dispatch
const & d )
const
11626 VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
11628 reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
11629 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11630 reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
// Fragments: Device::destroyDescriptorUpdateTemplateKHR — pointer and
// Optional<AllocationCallbacks> overloads; destruction returns void.
11640 template <
typename Dispatch>
11646 d.vkDestroyDescriptorUpdateTemplateKHR(
11647 m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
11650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11651 template <
typename Dispatch>
11658 d.vkDestroyDescriptorUpdateTemplateKHR(
11660 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
11661 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Fragments: Device::updateDescriptorSetWithTemplateKHR — raw-pData overload
// and enhanced templated overload passing &data as the opaque payload.
11665 template <
typename Dispatch>
11668 const void * pData,
11672 d.vkUpdateDescriptorSetWithTemplateKHR(
11673 m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
11676 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11677 template <
typename DataType,
typename Dispatch>
11685 d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
11686 static_cast<VkDescriptorSet>( descriptorSet ),
11687 static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
// '&' + 'data' were split across lines by the extraction ('&data').
11688 reinterpret_cast<const void *>( &
data ) );
// NOTE(review): lossy extraction — fragments only, not compilable.
//
// Fragments: VK_NV_clip_space_w_scaling —
// CommandBuffer::setViewportWScalingNV, pointer-count overload and
// enhanced ArrayProxy overload (size()/data() forwarded to the C call).
11694 template <
typename Dispatch>
11696 uint32_t viewportCount,
11701 d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
11704 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11705 template <
typename Dispatch>
11713 d.vkCmdSetViewportWScalingNV(
11714 m_commandBuffer, firstViewport, viewportWScalings.size(),
reinterpret_cast<const VkViewportWScalingNV *
>( viewportWScalings.data() ) );
// Fragments: VK_EXT_direct_mode_display — PhysicalDevice::releaseDisplayEXT.
// Note the '#ifdef DISABLE_ENHANCED_MODE' guard here (inverted relative to
// the usual '#ifndef'): the Result-returning form is the disabled-mode API.
11720 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
11721 template <
typename Dispatch>
11725 return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
11728 template <
typename Dispatch>
11733 d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
// NOTE(review): lossy extraction — fragments only, not compilable.
//
// Fragments: VK_EXT_acquire_xlib_display (only compiled under
// VK_USE_PLATFORM_XLIB_XRANDR_EXT) — acquireXlibDisplayEXT takes the X11
// Display by pointer (raw form) or by reference (enhanced form, '&dpy').
11737 #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
11740 template <
typename Dispatch>
11746 return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
11749 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11750 template <
typename Dispatch>
11756 VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
// Fragments: getRandROutputDisplayEXT — maps an RandR output to a
// DisplayKHR; pointer, enhanced, and Unique (smart-handle) variants.
11763 template <
typename Dispatch>
11770 return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
11773 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11774 template <
typename Dispatch>
11776 PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch
const & d )
const
11781 VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
11787 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11788 template <
typename Dispatch>
11790 PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch
const & d )
const
11795 VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
// The UniqueHandle deleter is ObjectRelease bound to this PhysicalDevice —
// i.e. the unique handle releases (not destroys) the display on reset.
11799 UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *
this, d ) ) );
// Fragments: VK_EXT_display_surface_counter —
// PhysicalDevice::getSurfaceCapabilities2EXT, pointer and enhanced forms.
11807 template <
typename Dispatch>
11814 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
11815 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
11818 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11819 template <
typename Dispatch>
11826 VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
11827 m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
11830 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
// NOTE(review): lossy extraction — fragments of the VK_EXT_display_control
// wrappers; interior lines (signatures, result checks, braces) are missing.
//
// Fragments: Device::displayPowerControlEXT — pointer and reference forms.
11836 template <
typename Dispatch>
11842 return static_cast<Result>(
11843 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
11846 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11847 template <
typename Dispatch>
11850 Dispatch
const & d )
const
11855 d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
// Fragments: Device::registerEventEXT (vkRegisterDeviceEventEXT) — creates
// a Fence signalled by a device event; pointer, enhanced, and (under the
// smart-handle guard) Unique variants.
11862 template <
typename Dispatch>
11869 return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
11870 reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
11871 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11872 reinterpret_cast<VkFence *>( pFence ) ) );
11875 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11876 template <
typename Dispatch>
11880 Dispatch
const & d )
const
11885 VkResult result = d.vkRegisterDeviceEventEXT(
11887 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
11888 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11889 reinterpret_cast<VkFence *>( &fence ) );
11895 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11896 template <
typename Dispatch>
11900 Dispatch
const & d )
const
11905 VkResult result = d.vkRegisterDeviceEventEXT(
11907 reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
11908 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11909 reinterpret_cast<VkFence *>( &fence ) );
// Fragments: Device::registerDisplayEventEXT — same pattern as the device
// event, with an extra DisplayKHR parameter; pointer/enhanced/Unique forms.
11918 template <
typename Dispatch>
11926 return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
11927 static_cast<VkDisplayKHR>( display ),
11928 reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
11929 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
11930 reinterpret_cast<VkFence *>( pFence ) ) );
11933 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11934 template <
typename Dispatch>
11939 Dispatch
const & d )
const
11944 VkResult result = d.vkRegisterDisplayEventEXT(
11946 static_cast<VkDisplayKHR>( display ),
11947 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
11948 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11949 reinterpret_cast<VkFence *>( &fence ) );
11955 # ifndef VULKAN_HPP_NO_SMART_HANDLE
11956 template <
typename Dispatch>
11961 Dispatch
const & d )
const
11966 VkResult result = d.vkRegisterDisplayEventEXT(
11968 static_cast<VkDisplayKHR>( display ),
11969 reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
11970 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
11971 reinterpret_cast<VkFence *>( &fence ) );
// Fragments: Device::getSwapchainCounterEXT — reads a uint64_t surface
// counter for a swapchain; pointer form and enhanced form (local counterValue).
11980 template <
typename Dispatch>
11983 uint64_t * pCounterValue,
11987 return static_cast<Result>(
11988 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
11991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
11992 template <
typename Dispatch>
11998 uint64_t counterValue;
12000 d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
// NOTE(review): lossy extraction — fragments only, not compilable.
//
// Fragments: VK_GOOGLE_display_timing —
// Device::getRefreshCycleDurationGOOGLE, pointer and enhanced forms.
12009 template <
typename Dispatch>
12016 return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
12017 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
12020 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12021 template <
typename Dispatch>
12028 VkResult result = d.vkGetRefreshCycleDurationGOOGLE(
12029 m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
12032 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
// Fragment: Device::getPastPresentationTimingGOOGLE, pointer-count form.
12036 template <
typename Dispatch>
12039 uint32_t * pPresentationTimingCount,
12044 return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
12045 static_cast<VkSwapchainKHR>( swapchain ),
12046 pPresentationTimingCount,
12047 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
// Fragment: enhanced vector-returning form — the standard Vulkan-Hpp
// two-call enumeration: query count with nullptr, resize, fetch, then
// shrink if fewer elements came back (the surrounding retry loop on
// VK_INCOMPLETE is among the lines lost by this extraction).
12050 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12051 template <
typename PastPresentationTimingGOOGLEAllocator,
typename Dispatch>
12058 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
12059 uint32_t presentationTimingCount;
12063 result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount,
nullptr );
12064 if ( ( result ==
VK_SUCCESS ) && presentationTimingCount )
12066 presentationTimings.resize( presentationTimingCount );
12067 result = d.vkGetPastPresentationTimingGOOGLE( m_device,
12068 static_cast<VkSwapchainKHR>( swapchain ),
12069 &presentationTimingCount,
12070 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
12075 if ( presentationTimingCount < presentationTimings.size() )
12077 presentationTimings.resize( presentationTimingCount );
12079 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
// Fragment: allocator-taking overload — identical flow, but the result
// vector is constructed with the caller-supplied allocator.
12082 template <
typename PastPresentationTimingGOOGLEAllocator,
12089 PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
12090 Dispatch
const & d )
const
12094 std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
12095 pastPresentationTimingGOOGLEAllocator );
12096 uint32_t presentationTimingCount;
12100 result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount,
nullptr );
12101 if ( ( result ==
VK_SUCCESS ) && presentationTimingCount )
12103 presentationTimings.resize( presentationTimingCount );
12104 result = d.vkGetPastPresentationTimingGOOGLE( m_device,
12105 static_cast<VkSwapchainKHR>( swapchain ),
12106 &presentationTimingCount,
12107 reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
12112 if ( presentationTimingCount < presentationTimings.size() )
12114 presentationTimings.resize( presentationTimingCount );
12116 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
// Fragments: VK_EXT_discard_rectangles —
// CommandBuffer::setDiscardRectangleEXT, pointer-count and ArrayProxy forms.
12122 template <
typename Dispatch>
12124 uint32_t discardRectangleCount,
12129 d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
12132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12133 template <
typename Dispatch>
12140 d.vkCmdSetDiscardRectangleEXT(
12141 m_commandBuffer, firstDiscardRectangle, discardRectangles.size(),
reinterpret_cast<const VkRect2D *
>( discardRectangles.data() ) );
// NOTE(review): lossy extraction — fragments only, not compilable.
//
// Fragments: VK_EXT_hdr_metadata — Device::setHdrMetadataEXT. The enhanced
// form takes two parallel ArrayProxies (swapchains / metadata); the visible
// size-mismatch check belongs to the VULKAN_HPP_NO_EXCEPTIONS branch (the
// throwing branch's lines are missing from this extraction).
12147 template <
typename Dispatch>
12154 d.vkSetHdrMetadataEXT(
12155 m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
12158 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12159 template <
typename Dispatch>
12165 # ifdef VULKAN_HPP_NO_EXCEPTIONS
12168 if ( swapchains.size() != metadata.size() )
12174 d.vkSetHdrMetadataEXT( m_device,
12176 reinterpret_cast<const VkSwapchainKHR *
>( swapchains.data() ),
12177 reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
// Fragments: VK_KHR_create_renderpass2 — Device::createRenderPass2KHR;
// pointer, enhanced (createResultValueType), and Unique variants.
12183 template <
typename Dispatch>
12190 return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
12191 reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
12192 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
12193 reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
12196 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12197 template <
typename Dispatch>
12201 Dispatch
const & d )
const
12207 d.vkCreateRenderPass2KHR( m_device,
12208 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
12209 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
12210 reinterpret_cast<VkRenderPass *>( &renderPass ) );
12216 # ifndef VULKAN_HPP_NO_SMART_HANDLE
12217 template <
typename Dispatch>
12221 Dispatch
const & d )
const
12227 d.vkCreateRenderPass2KHR( m_device,
12228 reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
12229 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
12230 reinterpret_cast<VkRenderPass *>( &renderPass ) );
12234 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Fragments: CommandBuffer::beginRenderPass2KHR / nextSubpass2KHR /
// endRenderPass2KHR — pointer and reference overloads of each.
12240 template <
typename Dispatch>
12246 d.vkCmdBeginRenderPass2KHR(
12247 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
12250 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12251 template <
typename Dispatch>
12258 d.vkCmdBeginRenderPass2KHR(
12259 m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
12263 template <
typename Dispatch>
12269 d.vkCmdNextSubpass2KHR(
12270 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
12273 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12274 template <
typename Dispatch>
12281 d.vkCmdNextSubpass2KHR(
12282 m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
12286 template <
typename Dispatch>
12291 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
12294 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12295 template <
typename Dispatch>
12301 d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
// NOTE(review): lossy extraction — fragments only, not compilable.
//
// Fragments: VK_KHR_shared_presentable_image —
// Device::getSwapchainStatusKHR. The '#ifdef DISABLE_ENHANCED_MODE' guard
// marks the raw-Result form; the second form result-checks via resultCheck
// (its success-code list is among the lines lost by the extraction).
12307 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
12308 template <
typename Dispatch>
12313 return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
12316 template <
typename Dispatch>
12318 Dispatch
const & d )
const
12322 VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
12323 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Fragments: VK_KHR_external_fence_capabilities —
// PhysicalDevice::getExternalFencePropertiesKHR; void query, pointer and
// value-returning (enhanced) forms.
12333 template <
typename Dispatch>
12339 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
12340 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
12341 reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
12344 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12345 template <
typename Dispatch>
12353 d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
12354 reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
12355 reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
12357 return externalFenceProperties;
// NOTE(review): lossy extraction — fragments only, not compilable.
//
// Fragments: VK_KHR_external_fence_win32 (Windows-only) —
// Device::importFenceWin32HandleKHR and Device::getFenceWin32HandleKHR,
// each in pointer and reference/enhanced forms.
12361 #if defined( VK_USE_PLATFORM_WIN32_KHR )
12364 template <
typename Dispatch>
12366 const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
12369 return static_cast<Result>(
12370 d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
12373 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12374 template <
typename Dispatch>
12376 Device::importFenceWin32HandleKHR(
const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch
const & d )
const
12380 VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
12387 template <
typename Dispatch>
12393 return static_cast<Result>(
12394 d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
12397 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12398 template <
typename Dispatch>
12400 Device::getFenceWin32HandleKHR(
const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch
const & d )
const
12405 VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
// Fragments: VK_KHR_external_fence_fd (POSIX fd variant) —
// Device::importFenceFdKHR and Device::getFenceFdKHR; same pattern.
12415 template <
typename Dispatch>
12420 return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
12423 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12424 template <
typename Dispatch>
12430 VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
12437 template <
typename Dispatch>
12443 return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
12446 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12447 template <
typename Dispatch>
12449 Dispatch
const & d )
const
12454 VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
// NOTE(review): lossy extraction — fragments of the VK_KHR_performance_query
// wrappers; interior lines are missing, fragments are not compilable.
//
// Fragment: pointer form of
// PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR — fills
// two parallel output arrays (counters + their descriptions).
12463 template <
typename Dispatch>
12466 uint32_t * pCounterCount,
12472 return static_cast<Result>(
12473 d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
12476 reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
12477 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
// Fragment: enhanced form returning std::pair of two vectors, resized in
// lock-step from one count; standard two-call enumeration with a final
// shrink if fewer elements were written.
12480 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12481 template <
typename PerformanceCounterKHRAllocator,
typename PerformanceCounterDescriptionKHRAllocator,
typename Dispatch>
12484 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>
::type
12489 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
12490 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
12492 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
12493 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
12494 uint32_t counterCount;
12498 result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount,
nullptr,
nullptr );
12499 if ( ( result ==
VK_SUCCESS ) && counterCount )
12501 counters.resize( counterCount );
12502 counterDescriptions.resize( counterCount );
12503 result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
12507 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
12508 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
12511 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
12514 if ( counterCount < counters.size() )
12516 counters.resize( counterCount );
12517 counterDescriptions.resize( counterCount );
// Fragment: allocator-taking overload — the pair is piecewise-constructed
// from the two caller-supplied allocators; flow otherwise identical.
12522 template <
typename PerformanceCounterKHRAllocator,
12523 typename PerformanceCounterDescriptionKHRAllocator,
12532 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>
::type
12534 PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
12535 PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
12536 Dispatch
const & d )
const
12540 std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
12541 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
12543 std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
12544 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
12545 std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
12546 uint32_t counterCount;
12550 result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount,
nullptr,
nullptr );
12551 if ( ( result ==
VK_SUCCESS ) && counterCount )
12553 counters.resize( counterCount );
12554 counterDescriptions.resize( counterCount );
12555 result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
12559 reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
12560 reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
12563 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
12566 if ( counterCount < counters.size() )
12568 counters.resize( counterCount );
12569 counterDescriptions.resize( counterCount );
// Fragments: getQueueFamilyPerformanceQueryPassesKHR — computes how many
// passes a performance query pool needs; pointer and enhanced forms.
12575 template <
typename Dispatch>
12578 uint32_t * pNumPasses,
12582 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
12583 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
12586 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12587 template <
typename Dispatch>
12593 uint32_t numPasses;
12594 d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
12595 m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
// Fragments: Device::acquireProfilingLockKHR (pointer + enhanced) and
// Device::releaseProfilingLockKHR (no parameters, void).
12601 template <
typename Dispatch>
12606 return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
12609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12610 template <
typename Dispatch>
12616 VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
12623 template <
typename Dispatch>
12627 d.vkReleaseProfilingLockKHR( m_device );
// NOTE(review): lossy extraction — fragments of the
// VK_KHR_get_surface_capabilities2 wrappers; not compilable as-is.
//
// Fragments: PhysicalDevice::getSurfaceCapabilities2KHR — pointer form,
// enhanced value form, and StructureChain form (pNext-extensible result).
12632 template <
typename Dispatch>
12639 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
12640 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
12641 reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
12644 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12645 template <
typename Dispatch>
12652 VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
12653 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12654 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
12657 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
12660 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
12668 VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
12669 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12670 reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
12673 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
// Fragment: PhysicalDevice::getSurfaceFormats2KHR, pointer-count form.
12677 template <
typename Dispatch>
12679 uint32_t * pSurfaceFormatCount,
12684 return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12685 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
12686 pSurfaceFormatCount,
12687 reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
// Fragment: enhanced vector form — two-call enumeration, final shrink.
12690 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12691 template <
typename SurfaceFormat2KHRAllocator,
typename Dispatch>
12697 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
12698 uint32_t surfaceFormatCount;
12702 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12703 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount,
nullptr );
12704 if ( ( result ==
VK_SUCCESS ) && surfaceFormatCount )
12706 surfaceFormats.resize( surfaceFormatCount );
12707 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12708 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12709 &surfaceFormatCount,
12710 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12715 if ( surfaceFormatCount < surfaceFormats.size() )
12717 surfaceFormats.resize( surfaceFormatCount );
12719 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
// Fragment: allocator-taking overload of the vector form.
12722 template <
typename SurfaceFormat2KHRAllocator,
12728 SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
12729 Dispatch
const & d )
const
12733 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
12734 uint32_t surfaceFormatCount;
12738 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12739 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount,
nullptr );
12740 if ( ( result ==
VK_SUCCESS ) && surfaceFormatCount )
12742 surfaceFormats.resize( surfaceFormatCount );
12743 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12744 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12745 &surfaceFormatCount,
12746 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12751 if ( surfaceFormatCount < surfaceFormats.size() )
12753 surfaceFormats.resize( surfaceFormatCount );
12755 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
// Fragment: StructureChain-vector form — a parallel vector of chains is
// kept in sync with the SurfaceFormat2KHR vector; each element's pNext is
// wired to its chain before the query and copied back after.
12758 template <
typename StructureChain,
typename StructureChainAllocator,
typename Dispatch>
12764 std::vector<StructureChain, StructureChainAllocator> structureChains;
12765 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
12766 uint32_t surfaceFormatCount;
12770 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12771 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount,
nullptr );
12772 if ( ( result ==
VK_SUCCESS ) && surfaceFormatCount )
12774 structureChains.resize( surfaceFormatCount );
12775 surfaceFormats.resize( surfaceFormatCount );
12776 for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12778 surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
12780 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12781 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12782 &surfaceFormatCount,
12783 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12788 if ( surfaceFormatCount < surfaceFormats.size() )
12790 structureChains.resize( surfaceFormatCount );
12792 for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12794 structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
12796 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
// Fragment: allocator-taking overload of the StructureChain-vector form.
12800 typename StructureChainAllocator,
12806 StructureChainAllocator & structureChainAllocator,
12807 Dispatch
const & d )
const
12811 std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
12812 std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
12813 uint32_t surfaceFormatCount;
12817 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
12818 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount,
nullptr );
12819 if ( ( result ==
VK_SUCCESS ) && surfaceFormatCount )
12821 structureChains.resize( surfaceFormatCount );
12822 surfaceFormats.resize( surfaceFormatCount );
12823 for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12825 surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
12827 result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
12828 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
12829 &surfaceFormatCount,
12830 reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
12835 if ( surfaceFormatCount < surfaceFormats.size() )
12837 structureChains.resize( surfaceFormatCount );
12839 for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
12841 structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
12843 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
// NOTE(review): lossy extraction — fragments of the
// VK_KHR_get_display_properties2 wrappers; not compilable as-is.
//
// Fragments: getDisplayProperties2KHR — pointer form plus enhanced vector
// form and its allocator-taking overload (two-call enumeration + shrink).
12849 template <
typename Dispatch>
12855 return static_cast<Result>(
12856 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
12859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12860 template <
typename DisplayProperties2KHRAllocator,
typename Dispatch>
12867 std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
12868 uint32_t propertyCount;
12872 result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount,
nullptr );
12873 if ( ( result ==
VK_SUCCESS ) && propertyCount )
12875 properties.resize( propertyCount );
12877 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
12882 if ( propertyCount < properties.size() )
12884 properties.resize( propertyCount );
12889 template <
typename DisplayProperties2KHRAllocator,
12899 std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
12900 uint32_t propertyCount;
12904 result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount,
nullptr );
12905 if ( ( result ==
VK_SUCCESS ) && propertyCount )
12907 properties.resize( propertyCount );
12909 d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
12914 if ( propertyCount < properties.size() )
12916 properties.resize( propertyCount );
// Fragments: getDisplayPlaneProperties2KHR — same three-overload pattern.
12922 template <
typename Dispatch>
12928 return static_cast<Result>(
12929 d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
12932 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
12933 template <
typename DisplayPlaneProperties2KHRAllocator,
typename Dispatch>
12940 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
12941 uint32_t propertyCount;
12945 result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount,
nullptr );
12946 if ( ( result ==
VK_SUCCESS ) && propertyCount )
12948 properties.resize( propertyCount );
12949 result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
12950 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
12955 if ( propertyCount < properties.size() )
12957 properties.resize( propertyCount );
12962 template <
typename DisplayPlaneProperties2KHRAllocator,
12972 std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
12973 uint32_t propertyCount;
12977 result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount,
nullptr );
12978 if ( ( result ==
VK_SUCCESS ) && propertyCount )
12980 properties.resize( propertyCount );
12981 result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
12982 m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
12987 if ( propertyCount < properties.size() )
12989 properties.resize( propertyCount );
// Fragments: getDisplayModeProperties2KHR — per-display modes; pointer
// form plus enhanced vector form and its allocator-taking overload.
12995 template <
typename Dispatch>
12997 uint32_t * pPropertyCount,
13002 return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
13003 m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
13006 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13007 template <
typename DisplayModeProperties2KHRAllocator,
typename Dispatch>
13014 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
13015 uint32_t propertyCount;
13019 result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount,
nullptr );
13020 if ( ( result ==
VK_SUCCESS ) && propertyCount )
13022 properties.resize( propertyCount );
13023 result = d.vkGetDisplayModeProperties2KHR(
13024 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
13029 if ( propertyCount < properties.size() )
13031 properties.resize( propertyCount );
13036 template <
typename DisplayModeProperties2KHRAllocator,
13043 DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
13044 Dispatch
const & d )
const
13048 std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
13049 uint32_t propertyCount;
13053 result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount,
nullptr );
13054 if ( ( result ==
VK_SUCCESS ) && propertyCount )
13056 properties.resize( propertyCount );
13057 result = d.vkGetDisplayModeProperties2KHR(
13058 m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
13063 if ( propertyCount < properties.size() )
13065 properties.resize( propertyCount );
// PhysicalDevice::getDisplayPlaneCapabilities2KHR — raw-pointer overload and enhanced
// reference overload (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13071 template <
typename Dispatch>
13078 return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
13079 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
13080 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
13083 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13084 template <
typename Dispatch>
// Enhanced overload: takes the info by reference and writes into a local capabilities struct.
13091 VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
13092 reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
13093 reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
// Instance::createIOSSurfaceMVK — VK_MVK_ios_surface wrappers, compiled only when the
// iOS platform is enabled: raw-pointer overload, enhanced overload, and Unique-handle
// overload (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13100 #if defined( VK_USE_PLATFORM_IOS_MVK )
13103 template <
typename Dispatch>
// Raw overload: forwards the create-info, allocator callbacks, and out-surface pointer.
13110 return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
13111 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
13112 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13113 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
13116 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13117 template <
typename Dispatch>
13119 Instance::createIOSSurfaceMVK(
const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
13120 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13121 Dispatch
const & d )
const
// Enhanced overload: Optional<> allocator is converted to a nullable C callbacks pointer.
13127 d.vkCreateIOSSurfaceMVK( m_instance,
13128 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
13129 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13130 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
13136 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13137 template <
typename Dispatch>
13139 Instance::createIOSSurfaceMVKUnique(
const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
13140 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13141 Dispatch
const & d )
const
13147 d.vkCreateIOSSurfaceMVK( m_instance,
13148 reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
13149 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13150 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
// Unique overload wraps the surface in a UniqueHandle whose deleter destroys it via this Instance.
13154 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13155 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// Instance::createMacOSSurfaceMVK — VK_MVK_macos_surface wrappers, mirror image of the
// iOS variants: raw-pointer, enhanced, and Unique-handle overloads (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13161 #if defined( VK_USE_PLATFORM_MACOS_MVK )
13164 template <
typename Dispatch>
13171 return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
13172 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
13173 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13174 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
13177 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13178 template <
typename Dispatch>
13180 Instance::createMacOSSurfaceMVK(
const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
13181 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13182 Dispatch
const & d )
const
// Enhanced overload: Optional<> allocator becomes a nullable C callbacks pointer.
13188 d.vkCreateMacOSSurfaceMVK( m_instance,
13189 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
13190 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13191 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
13197 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13198 template <
typename Dispatch>
13200 Instance::createMacOSSurfaceMVKUnique(
const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
13201 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
13202 Dispatch
const & d )
const
13208 d.vkCreateMacOSSurfaceMVK( m_instance,
13209 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
13210 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13211 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
// Unique overload returns a UniqueHandle that destroys the surface through this Instance.
13215 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13216 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// VK_EXT_debug_utils object-naming/tagging and label wrappers for Device, Queue, and
// CommandBuffer. Each pair is a raw-pointer overload plus an enhanced reference
// overload; these are thin one-call forwards to the C entry points (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13224 template <
typename Dispatch>
// Device::setDebugUtilsObjectNameEXT — attach a human-readable name to an object.
13229 return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
13232 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13233 template <
typename Dispatch>
13239 VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
13246 template <
typename Dispatch>
// Device::setDebugUtilsObjectTagEXT — attach an opaque binary tag to an object.
13251 return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
13254 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13255 template <
typename Dispatch>
13261 VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
13268 template <
typename Dispatch>
// Queue::beginDebugUtilsLabelEXT — open a labeled region on the queue.
13273 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13276 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13277 template <
typename Dispatch>
13283 d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13287 template <
typename Dispatch>
// Queue::endDebugUtilsLabelEXT — close the current labeled region; no parameters.
13291 d.vkQueueEndDebugUtilsLabelEXT( m_queue );
13294 template <
typename Dispatch>
// Queue::insertDebugUtilsLabelEXT — emit a single point label.
13299 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13302 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13303 template <
typename Dispatch>
13309 d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13313 template <
typename Dispatch>
// CommandBuffer::beginDebugUtilsLabelEXT — same label semantics, recorded into the command buffer.
13318 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13321 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13322 template <
typename Dispatch>
13328 d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
13332 template <
typename Dispatch>
13336 d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
13339 template <
typename Dispatch>
13344 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
13347 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13348 template <
typename Dispatch>
13354 d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
// Instance-level VK_EXT_debug_utils messenger lifecycle: create (raw / enhanced / Unique),
// destroy (named and destroy() alias, each raw + enhanced), and submitDebugUtilsMessageEXT
// (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13358 template <
typename Dispatch>
13366 return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
13367 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
13368 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13369 reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
13372 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13373 template <
typename Dispatch>
13377 Dispatch
const & d )
const
// Enhanced create: writes into a local messenger handle.
13382 VkResult result = d.vkCreateDebugUtilsMessengerEXT(
13384 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
13385 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13386 reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
13392 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13393 template <
typename Dispatch>
13397 Dispatch
const & d )
const
// Unique create: identical call; result is wrapped in a smart handle (wrapping lines not visible here).
13402 VkResult result = d.vkCreateDebugUtilsMessengerEXT(
13404 reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
13405 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13406 reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
13410 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13416 template <
typename Dispatch>
// Raw destroy overload.
13422 d.vkDestroyDebugUtilsMessengerEXT(
13423 m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13426 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13427 template <
typename Dispatch>
13434 d.vkDestroyDebugUtilsMessengerEXT(
13436 static_cast<VkDebugUtilsMessengerEXT>( messenger ),
13437 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Second destroy pair below — presumably the generic destroy( messenger, ... ) alias used by
// UniqueHandle deleters; the visible bodies are identical to the named overloads. TODO confirm.
13441 template <
typename Dispatch>
13447 d.vkDestroyDebugUtilsMessengerEXT(
13448 m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13451 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13452 template <
typename Dispatch>
13459 d.vkDestroyDebugUtilsMessengerEXT(
13461 static_cast<VkDebugUtilsMessengerEXT>( messenger ),
13462 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
13466 template <
typename Dispatch>
// Instance::submitDebugUtilsMessageEXT — inject a message into the debug stream (raw overload).
13473 d.vkSubmitDebugUtilsMessageEXT( m_instance,
13474 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
13475 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
13476 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
13479 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13480 template <
typename Dispatch>
13488 d.vkSubmitDebugUtilsMessageEXT( m_instance,
13489 static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
13490 static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
13491 reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
// VK_ANDROID_external_memory_android_hardware_buffer wrappers, compiled only for the
// Android platform: getAndroidHardwareBufferPropertiesANDROID (raw, enhanced, and
// StructureChain overloads) and getMemoryAndroidHardwareBufferANDROID (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13495 #if defined( VK_USE_PLATFORM_ANDROID_KHR )
13498 template <
typename Dispatch>
13500 Device::getAndroidHardwareBufferPropertiesANDROID(
const struct AHardwareBuffer *
buffer,
13501 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
13505 return static_cast<Result>(
13506 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device,
buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
13509 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13510 template <
typename Dispatch>
13512 Device::getAndroidHardwareBufferPropertiesANDROID(
const struct AHardwareBuffer &
buffer, Dispatch
const & d )
const
// Enhanced overload: returns a plain properties struct.
13516 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
13518 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
13524 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
13526 Device::getAndroidHardwareBufferPropertiesANDROID(
const struct AHardwareBuffer & buffer, Dispatch
const & d )
const
// StructureChain overload: fills the chain's AndroidHardwareBufferPropertiesANDROID link so
// callers can also receive chained extension structs.
13530 StructureChain<X, Y,
Z...> structureChain;
13531 VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
13532 structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
13534 d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
13537 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
13541 template <
typename Dispatch>
13543 Device::getMemoryAndroidHardwareBufferANDROID(
const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
13544 struct AHardwareBuffer ** pBuffer,
13548 return static_cast<Result>(
13549 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
13552 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13553 template <
typename Dispatch>
13555 Device::getMemoryAndroidHardwareBufferANDROID(
const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch
const & d )
const
// Enhanced overload: exports device memory as an AHardwareBuffer pointer.
13559 struct AHardwareBuffer *
buffer;
13561 d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
// VK_EXT_sample_locations wrappers: CommandBuffer::setSampleLocationsEXT (raw + enhanced)
// and PhysicalDevice::getMultisamplePropertiesEXT (raw + enhanced) — thin forwards
// (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13571 template <
typename Dispatch>
13576 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
13579 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13580 template <
typename Dispatch>
13586 d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
13590 template <
typename Dispatch>
// Raw overload: query multisample properties for a given sample count.
13596 d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
13597 m_physicalDevice, static_cast<VkSampleCountFlagBits>(
samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
13600 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13601 template <
typename Dispatch>
// Enhanced overload returns the struct by value (void C call, so no result code to check).
13608 d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
13609 m_physicalDevice, static_cast<VkSampleCountFlagBits>(
samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
13611 return multisampleProperties;
// VK_KHR_get_memory_requirements2 wrappers: getImageMemoryRequirements2KHR and
// getBufferMemoryRequirements2KHR, each with raw, enhanced, and StructureChain
// overloads (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13617 template <
typename Dispatch>
13623 d.vkGetImageMemoryRequirements2KHR(
13624 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
13627 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13628 template <
typename Dispatch>
// Enhanced overload: returns MemoryRequirements2 by value.
13635 d.vkGetImageMemoryRequirements2KHR(
13636 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13638 return memoryRequirements;
13641 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
// StructureChain overload: fills the chain link so extension structs can be queried too.
13649 d.vkGetImageMemoryRequirements2KHR(
13650 m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13652 return structureChain;
13656 template <
typename Dispatch>
13662 d.vkGetBufferMemoryRequirements2KHR(
13663 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
13666 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13667 template <
typename Dispatch>
13674 d.vkGetBufferMemoryRequirements2KHR(
13675 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13677 return memoryRequirements;
13680 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
13688 d.vkGetBufferMemoryRequirements2KHR(
13689 m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
13691 return structureChain;
// Device::getImageSparseMemoryRequirements2KHR — raw overload, default-allocator
// enhanced overload, and user-allocator overload. Unlike the result-returning
// enumerations, the C call returns void, so the two-call pattern has no status checks
// (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13695 template <
typename Dispatch>
13697 uint32_t * pSparseMemoryRequirementCount,
13702 d.vkGetImageSparseMemoryRequirements2KHR( m_device,
13703 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
13704 pSparseMemoryRequirementCount,
13705 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
13708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13709 template <
typename SparseImageMemoryRequirements2Allocator,
typename Dispatch>
13715 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
13716 uint32_t sparseMemoryRequirementCount;
// First call: count only.
13717 d.vkGetImageSparseMemoryRequirements2KHR(
13718 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount,
nullptr );
13719 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
// Second call: fetch the data.
13720 d.vkGetImageSparseMemoryRequirements2KHR( m_device,
13721 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
13722 &sparseMemoryRequirementCount,
13723 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
13725 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
13726 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
13728 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13730 return sparseMemoryRequirements;
13733 template <
typename SparseImageMemoryRequirements2Allocator,
13739 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
13740 Dispatch
const & d )
const
// Same flow as above, but the result vector uses the caller-supplied allocator.
13744 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
13745 sparseImageMemoryRequirements2Allocator );
13746 uint32_t sparseMemoryRequirementCount;
13747 d.vkGetImageSparseMemoryRequirements2KHR(
13748 m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount,
nullptr );
13749 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13750 d.vkGetImageSparseMemoryRequirements2KHR( m_device,
13751 reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
13752 &sparseMemoryRequirementCount,
13753 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
13755 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
13756 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
13758 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
13760 return sparseMemoryRequirements;
// VK_KHR_acceleration_structure lifecycle: createAccelerationStructureKHR (raw,
// enhanced, Unique) and destroyAccelerationStructureKHR (named + destroy() alias,
// raw + enhanced each) (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13766 template <
typename Dispatch>
13774 return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
13775 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
13776 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
13777 reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
13780 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13781 template <
typename Dispatch>
13785 Dispatch
const & d )
const
// Enhanced create: returns the new handle through createResultValueType.
13790 VkResult result = d.vkCreateAccelerationStructureKHR(
13792 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
13793 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13794 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
13797 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
13800 # ifndef VULKAN_HPP_NO_SMART_HANDLE
13801 template <
typename Dispatch>
13805 Dispatch
const & d )
const
13810 VkResult result = d.vkCreateAccelerationStructureKHR(
13812 reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
13813 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
13814 reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
13818 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
13824 template <
typename Dispatch>
// Raw destroy overload.
13830 d.vkDestroyAccelerationStructureKHR(
13831 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13834 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13835 template <
typename Dispatch>
13842 d.vkDestroyAccelerationStructureKHR(
13844 static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
13845 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Second destroy pair — presumably the generic destroy( accelerationStructure, ... ) alias;
// visible bodies match the named overloads. TODO confirm against the full file.
13849 template <
typename Dispatch>
13855 d.vkDestroyAccelerationStructureKHR(
13856 m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
13859 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13860 template <
typename Dispatch>
13867 d.vkDestroyAccelerationStructureKHR(
13869 static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
13870 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Acceleration-structure build commands: CommandBuffer::buildAccelerationStructuresKHR
// (raw + enhanced with size validation), CommandBuffer::buildAccelerationStructuresIndirectKHR
// (raw + enhanced), and the host-side Device::buildAccelerationStructuresKHR (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
13874 template <
typename Dispatch>
13882 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
13884 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
13885 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
13888 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13889 template <
typename Dispatch>
// Under VULKAN_HPP_NO_EXCEPTIONS the enhanced overload validates that the two ArrayProxy
// arguments have matching lengths before calling.
13896 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13899 if ( infos.size() != pBuildRangeInfos.size() )
13905 d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
13908 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
13912 template <
typename Dispatch>
13916 const uint32_t * pIndirectStrides,
13917 const uint32_t *
const * ppMaxPrimitiveCounts,
// Indirect build: device addresses, strides, and per-geometry max primitive counts are
// forwarded alongside the geometry infos.
13921 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
13923 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
13924 reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
13926 ppMaxPrimitiveCounts );
13929 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13930 template <
typename Dispatch>
// Enhanced indirect build checks all three companion arrays against infos.size().
13939 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13944 if ( infos.size() != indirectDeviceAddresses.size() )
13948 if ( infos.size() != indirectStrides.size() )
13952 if ( infos.size() != pMaxPrimitiveCounts.size() )
13958 d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
13961 reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
13962 indirectStrides.data(),
13963 pMaxPrimitiveCounts.data() );
13967 template <
typename Dispatch>
13970 uint32_t infoCount,
// Host-side build: may run under a deferred operation handle.
13976 return static_cast<Result>(
13977 d.vkBuildAccelerationStructuresKHR( m_device,
13978 static_cast<VkDeferredOperationKHR>( deferredOperation ),
13980 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
13981 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
13984 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
13985 template <
typename Dispatch>
13990 Dispatch
const & d )
const
13993 # ifdef VULKAN_HPP_NO_EXCEPTIONS
13996 if ( infos.
size() != pBuildRangeInfos.
size() )
14003 d.vkBuildAccelerationStructuresKHR( m_device,
14004 static_cast<VkDeferredOperationKHR>( deferredOperation ),
14007 reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.
data() ) );
14009 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Host-side acceleration-structure copies, each with raw + enhanced overloads and an
// optional deferred operation: copyAccelerationStructureKHR,
// copyAccelerationStructureToMemoryKHR, copyMemoryToAccelerationStructureKHR (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14017 template <
typename Dispatch>
14023 return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
14024 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
14027 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14028 template <
typename Dispatch>
14032 Dispatch
const & d )
const
14036 VkResult result = d.vkCopyAccelerationStructureKHR(
14037 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
14039 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Serialize an acceleration structure out to host memory.
14047 template <
typename Dispatch>
14054 return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
14055 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
14058 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14059 template <
typename Dispatch>
14063 Dispatch
const & d )
const
14067 VkResult result = d.vkCopyAccelerationStructureToMemoryKHR(
14068 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
14070 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Deserialize host memory back into an acceleration structure.
14078 template <
typename Dispatch>
14085 return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
14086 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
14089 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14090 template <
typename Dispatch>
14094 Dispatch
const & d )
const
14098 VkResult result = d.vkCopyMemoryToAccelerationStructureKHR(
14099 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
14101 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Device::writeAccelerationStructuresPropertiesKHR — host query of acceleration-structure
// properties: raw overload, vector<DataType> enhanced overload, and single-DataType
// enhanced overload (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14109 template <
typename Dispatch>
14120 return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
14121 accelerationStructureCount,
14122 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
14123 static_cast<VkQueryType>( queryType ),
14129 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14130 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
14137 Dispatch
const & d )
const
// Vector overload: result buffer sized as dataSize / sizeof(DataType) elements.
14142 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
14143 VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
14144 accelerationStructures.
size(),
14145 reinterpret_cast<const VkAccelerationStructureKHR *
>( accelerationStructures.
data() ),
14146 static_cast<VkQueryType>( queryType ),
14148 reinterpret_cast<void *>( data.data() ),
// Single-value overload: writes one DataType instead of a vector.
14155 template <
typename DataType,
typename Dispatch>
14160 Dispatch
const & d )
const
14165 VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
14166 accelerationStructures.
size(),
14167 reinterpret_cast<const VkAccelerationStructureKHR *
>( accelerationStructures.
data() ),
14168 static_cast<VkQueryType>( queryType ),
14170 reinterpret_cast<void *>( &data ),
// Command-buffer recorded acceleration-structure copies (raw + enhanced each) and
// Device::getAccelerationStructureAddressKHR-style device-address query (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14178 template <
typename Dispatch>
14183 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
14186 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14187 template <
typename Dispatch>
14193 d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
14197 template <
typename Dispatch>
14202 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
14205 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14206 template <
typename Dispatch>
14212 d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
14216 template <
typename Dispatch>
14221 d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
14224 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14225 template <
typename Dispatch>
14231 d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
// Device-address query: returns the VkDeviceAddress for an acceleration structure.
14235 template <
typename Dispatch>
14241 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
14244 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14245 template <
typename Dispatch>
14253 d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
// CommandBuffer::writeAccelerationStructuresPropertiesKHR (raw + enhanced, writes into a
// query pool starting at firstQuery) and
// Device::getAccelerationStructureCompatibilityKHR (raw + enhanced) (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14259 template <
typename Dispatch>
14265 uint32_t firstQuery,
14269 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
14270 accelerationStructureCount,
14271 reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
14272 static_cast<VkQueryType>( queryType ),
14273 static_cast<VkQueryPool>( queryPool ),
14277 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14278 template <
typename Dispatch>
14283 uint32_t firstQuery,
// Enhanced overload: count and data come from the ArrayProxy.
14288 d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
14289 accelerationStructures.size(),
14290 reinterpret_cast<const VkAccelerationStructureKHR *
>( accelerationStructures.data() ),
14291 static_cast<VkQueryType>( queryType ),
14292 static_cast<VkQueryPool
>( queryPool ),
// Compatibility query: reports whether a serialized acceleration structure version is usable.
14297 template <
typename Dispatch>
14303 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
14304 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
14305 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
14308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14309 template <
typename Dispatch>
14317 d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
14318 reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
14319 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
14321 return compatibility;
// Device::getAccelerationStructureBuildSizesKHR — raw overload plus enhanced overload
// that (under VULKAN_HPP_NO_EXCEPTIONS) validates maxPrimitiveCounts against
// buildInfo.geometryCount (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14325 template <
typename Dispatch>
14328 const uint32_t * pMaxPrimitiveCounts,
14333 d.vkGetAccelerationStructureBuildSizesKHR( m_device,
14334 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
14335 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
14336 pMaxPrimitiveCounts,
14337 reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
14340 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14341 template <
typename Dispatch>
// Length check only compiled in when exceptions are disabled.
14349 # ifdef VULKAN_HPP_NO_EXCEPTIONS
14352 if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
14359 d.vkGetAccelerationStructureBuildSizesKHR( m_device,
14360 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
14361 reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
14362 maxPrimitiveCounts.data(),
// VK_KHR_sampler_ycbcr_conversion: createSamplerYcbcrConversionKHR (raw, enhanced,
// Unique) and destroySamplerYcbcrConversionKHR (raw + enhanced) (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14371 template <
typename Dispatch>
14379 return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
14380 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
14381 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14382 reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
14385 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14386 template <
typename Dispatch>
14390 Dispatch
const & d )
const
// Enhanced create: returns the conversion handle via createResultValueType.
14395 VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
14397 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
14398 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14399 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
14402 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
14405 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14406 template <
typename Dispatch>
14410 Dispatch
const & d )
const
14415 VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
14417 reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
14418 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14419 reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
14423 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14429 template <
typename Dispatch>
// Raw destroy overload.
14435 d.vkDestroySamplerYcbcrConversionKHR(
14436 m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14439 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14440 template <
typename Dispatch>
14447 d.vkDestroySamplerYcbcrConversionKHR(
14449 static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
14450 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// VK_KHR_bind_memory2: Device::bindBufferMemory2KHR and Device::bindImageMemory2KHR —
// batched binding of buffers/images to device memory. Raw overloads visible; the
// enhanced bodies are mostly missing from this extraction (fragments).
// NOTE(review): lossy extraction — original line numbers are fused into the text and
// intermediate lines are missing; comments describe only the visible fragments.
14456 template <
typename Dispatch>
14462 return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
14465 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14466 template <
typename Dispatch>
14469 Dispatch
const & d )
const
14480 template <
typename Dispatch>
14486 return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
14489 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14490 template <
typename Dispatch>
14505 template <
typename Dispatch>
14510 return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
14511 m_device, static_cast<VkImage>(
image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
14514 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14515 template <
typename Dispatch>
14522 VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT(
14523 m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
14532 template <
typename Dispatch>
14539 return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
14540 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
14541 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14542 reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
14545 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14546 template <
typename Dispatch>
14550 Dispatch
const & d )
const
14555 VkResult result = d.vkCreateValidationCacheEXT(
14557 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
14558 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14559 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
14562 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), validationCache );
14565 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14566 template <
typename Dispatch>
14570 Dispatch
const & d )
const
14575 VkResult result = d.vkCreateValidationCacheEXT(
14577 reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
14578 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14579 reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
14583 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14589 template <
typename Dispatch>
14595 d.vkDestroyValidationCacheEXT(
14596 m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14599 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14600 template <
typename Dispatch>
14607 d.vkDestroyValidationCacheEXT(
14609 static_cast<VkValidationCacheEXT>( validationCache ),
14610 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14614 template <
typename Dispatch>
14620 d.vkDestroyValidationCacheEXT(
14621 m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14624 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14625 template <
typename Dispatch>
14632 d.vkDestroyValidationCacheEXT(
14634 static_cast<VkValidationCacheEXT>( validationCache ),
14635 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14639 template <
typename Dispatch>
14641 uint32_t srcCacheCount,
14646 return static_cast<Result>( d.vkMergeValidationCachesEXT(
14647 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
14650 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14651 template <
typename Dispatch>
14655 Dispatch
const & d )
const
14659 VkResult result = d.vkMergeValidationCachesEXT(
14660 m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.
size(),
reinterpret_cast<const VkValidationCacheEXT *
>( srcCaches.
data() ) );
14667 template <
typename Dispatch>
14669 size_t * pDataSize,
14674 return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
14677 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14678 template <
typename U
int8_tAllocator,
typename Dispatch>
14684 std::vector<uint8_t, Uint8_tAllocator>
data;
14689 result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize,
nullptr );
14692 data.resize( dataSize );
14694 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
14699 if ( dataSize < data.size() )
14701 data.resize( dataSize );
14706 template <
typename Uint8_tAllocator,
14715 std::vector<uint8_t, Uint8_tAllocator>
data( uint8_tAllocator );
14720 result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize,
nullptr );
14723 data.resize( dataSize );
14725 d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
14730 if ( dataSize < data.size() )
14732 data.resize( dataSize );
14740 template <
typename Dispatch>
14746 d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
14749 template <
typename Dispatch>
14751 uint32_t viewportCount,
14756 d.vkCmdSetViewportShadingRatePaletteNV(
14757 m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
14760 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14761 template <
typename Dispatch>
14763 uint32_t firstViewport,
14769 d.vkCmdSetViewportShadingRatePaletteNV(
14770 m_commandBuffer, firstViewport, shadingRatePalettes.size(),
reinterpret_cast<const VkShadingRatePaletteNV *
>( shadingRatePalettes.data() ) );
14774 template <
typename Dispatch>
14776 uint32_t customSampleOrderCount,
14781 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
14782 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
14783 customSampleOrderCount,
14784 reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
14787 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14788 template <
typename Dispatch>
14796 d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
14797 static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
14798 customSampleOrders.size(),
14805 template <
typename Dispatch>
14813 return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
14814 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
14815 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
14816 reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
14819 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14820 template <
typename Dispatch>
14824 Dispatch
const & d )
const
14829 VkResult result = d.vkCreateAccelerationStructureNV(
14831 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
14832 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14833 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
14836 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
14839 # ifndef VULKAN_HPP_NO_SMART_HANDLE
14840 template <
typename Dispatch>
14844 Dispatch
const & d )
const
14849 VkResult result = d.vkCreateAccelerationStructureNV(
14851 reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
14852 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
14853 reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
14857 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
14863 template <
typename Dispatch>
14869 d.vkDestroyAccelerationStructureNV(
14870 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14873 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14874 template <
typename Dispatch>
14881 d.vkDestroyAccelerationStructureNV(
14883 static_cast<VkAccelerationStructureNV>( accelerationStructure ),
14884 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14888 template <
typename Dispatch>
14894 d.vkDestroyAccelerationStructureNV(
14895 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
14898 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14899 template <
typename Dispatch>
14906 d.vkDestroyAccelerationStructureNV(
14908 static_cast<VkAccelerationStructureNV>( accelerationStructure ),
14909 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
14913 template <
typename Dispatch>
14920 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
14921 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
14922 reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
14925 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14926 template <
typename Dispatch>
14934 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
14935 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
14936 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
14938 return memoryRequirements;
14941 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
14950 d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
14951 reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
14952 reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
14954 return structureChain;
14958 template <
typename Dispatch>
14963 return static_cast<Result>(
14964 d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
14967 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
14968 template <
typename Dispatch>
14974 VkResult result = d.vkBindAccelerationStructureMemoryNV(
14982 template <
typename Dispatch>
14994 d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
14995 reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
14996 static_cast<VkBuffer>( instanceData ),
14997 static_cast<VkDeviceSize>( instanceOffset ),
14998 static_cast<VkBool32>( update ),
14999 static_cast<VkAccelerationStructureNV>(
dst ),
15000 static_cast<VkAccelerationStructureNV>(
src ),
15001 static_cast<VkBuffer>( scratch ),
15002 static_cast<VkDeviceSize>( scratchOffset ) );
15005 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15006 template <
typename Dispatch>
15019 d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
15020 reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
15021 static_cast<VkBuffer>( instanceData ),
15022 static_cast<VkDeviceSize>( instanceOffset ),
15023 static_cast<VkBool32>( update ),
15024 static_cast<VkAccelerationStructureNV>(
dst ),
15025 static_cast<VkAccelerationStructureNV>(
src ),
15026 static_cast<VkBuffer>( scratch ),
15027 static_cast<VkDeviceSize>( scratchOffset ) );
15031 template <
typename Dispatch>
15038 d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
15039 static_cast<VkAccelerationStructureNV>(
dst ),
15040 static_cast<VkAccelerationStructureNV>(
src ),
15041 static_cast<VkCopyAccelerationStructureModeKHR>(
mode ) );
15044 template <
typename Dispatch>
15062 d.vkCmdTraceRaysNV( m_commandBuffer,
15063 static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
15064 static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
15065 static_cast<VkBuffer>( missShaderBindingTableBuffer ),
15066 static_cast<VkDeviceSize>( missShaderBindingOffset ),
15067 static_cast<VkDeviceSize>( missShaderBindingStride ),
15068 static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
15069 static_cast<VkDeviceSize>( hitShaderBindingOffset ),
15070 static_cast<VkDeviceSize>( hitShaderBindingStride ),
15071 static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
15072 static_cast<VkDeviceSize>( callableShaderBindingOffset ),
15073 static_cast<VkDeviceSize>( callableShaderBindingStride ),
15079 template <
typename Dispatch>
15081 uint32_t createInfoCount,
15088 return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
15089 static_cast<VkPipelineCache>( pipelineCache ),
15091 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
15092 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
15093 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
15096 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15097 template <
typename PipelineAllocator,
typename Dispatch>
15102 Dispatch
const & d )
const
15106 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size() );
15107 VkResult result = d.vkCreateRayTracingPipelinesNV(
15109 static_cast<VkPipelineCache>( pipelineCache ),
15110 createInfos.
size(),
15112 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15113 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
15114 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15121 template <
typename PipelineAllocator,
15129 PipelineAllocator & pipelineAllocator,
15130 Dispatch
const & d )
const
15134 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size(), pipelineAllocator );
15135 VkResult result = d.vkCreateRayTracingPipelinesNV(
15137 static_cast<VkPipelineCache>( pipelineCache ),
15138 createInfos.
size(),
15140 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15141 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
15142 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15149 template <
typename Dispatch>
15154 Dispatch
const & d )
const
15159 VkResult result = d.vkCreateRayTracingPipelinesNV(
15161 static_cast<VkPipelineCache>( pipelineCache ),
15163 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
15164 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15165 reinterpret_cast<VkPipeline *>( &pipeline ) );
15166 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15173 # ifndef VULKAN_HPP_NO_SMART_HANDLE
15174 template <
typename Dispatch,
typename PipelineAllocator>
15179 Dispatch
const & d )
const
15183 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
15184 VkResult result = d.vkCreateRayTracingPipelinesNV(
15186 static_cast<VkPipelineCache>( pipelineCache ),
15187 createInfos.
size(),
15189 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15190 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
15191 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15194 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
15195 uniquePipelines.reserve( createInfos.
size() );
15197 for (
auto const & pipeline :
pipelines )
15205 template <
typename Dispatch,
15206 typename PipelineAllocator,
15208 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>
::value,
int>
::type>
15213 PipelineAllocator & pipelineAllocator,
15214 Dispatch
const & d )
const
15218 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
15219 VkResult result = d.vkCreateRayTracingPipelinesNV(
15221 static_cast<VkPipelineCache>( pipelineCache ),
15222 createInfos.
size(),
15224 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15225 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
15226 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15229 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
15230 uniquePipelines.reserve( createInfos.
size() );
15232 for (
auto const & pipeline :
pipelines )
15240 template <
typename Dispatch>
15245 Dispatch
const & d )
const
15250 VkResult result = d.vkCreateRayTracingPipelinesNV(
15252 static_cast<VkPipelineCache>( pipelineCache ),
15254 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
15255 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
15256 reinterpret_cast<VkPipeline *>( &pipeline ) );
15257 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15268 template <
typename Dispatch>
15270 uint32_t firstGroup,
15271 uint32_t groupCount,
15277 return static_cast<Result>(
15278 d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
15281 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15282 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
15289 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
15290 VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
15291 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() *
sizeof(
DataType ), reinterpret_cast<void *>( data.data() ) );
15297 template <
typename DataType,
typename Dispatch>
15304 VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
15305 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount,
sizeof(
DataType ), reinterpret_cast<void *>( &data ) );
15312 template <
typename Dispatch>
15319 return static_cast<Result>(
15320 d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
15323 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15324 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
15331 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
15332 VkResult result = d.vkGetAccelerationStructureHandleNV(
15333 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() *
sizeof(
DataType ), reinterpret_cast<void *>( data.data() ) );
15339 template <
typename DataType,
typename Dispatch>
15346 VkResult result = d.vkGetAccelerationStructureHandleNV(
15347 m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ),
sizeof(
DataType ), reinterpret_cast<void *>( &data ) );
// NOTE(review): garbled extraction of the two
// CommandBuffer::writeAccelerationStructuresPropertiesNV overloads (pointer-style
// and ArrayProxy enhanced) from the generated vulkan_funcs.hpp. Signatures, the
// trailing firstQuery argument, braces and the closing #endif were dropped —
// restore from the canonical generated header; do not hand-edit these fragments.
15354 template <
typename Dispatch>
15359 uint32_t firstQuery,
15363 d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
15364 accelerationStructureCount,
15365 reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
15366 static_cast<VkQueryType>( queryType ),
15367 static_cast<VkQueryPool>( queryPool ),
15371 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15372 template <
typename Dispatch>
15377 uint32_t firstQuery,
15382 d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
15383 accelerationStructures.size(),
15384 reinterpret_cast<const VkAccelerationStructureNV *
>( accelerationStructures.data() ),
15385 static_cast<VkQueryType>( queryType ),
15386 static_cast<VkQueryPool
>( queryPool ),
15391 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
15392 template <
typename Dispatch>
15398 return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ),
shader ) );
15401 template <
typename Dispatch>
15407 VkResult result = d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader );
// NOTE(review): garbled extraction of the three
// Device::getDescriptorSetLayoutSupportKHR overloads (pointer-style, enhanced
// returning DescriptorSetLayoutSupport, and the StructureChain<X, Y, Z...>
// variant — the `return structureChain;` line below belongs to the last one).
// Signatures, local declarations, braces and #endif were dropped — restore from
// the canonical generated header; do not hand-edit these fragments.
15416 template <
typename Dispatch>
15422 d.vkGetDescriptorSetLayoutSupportKHR(
15423 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
15426 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15427 template <
typename Dispatch>
15435 d.vkGetDescriptorSetLayoutSupportKHR(
15436 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
15441 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
15450 d.vkGetDescriptorSetLayoutSupportKHR(
15451 m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
15453 return structureChain;
// NOTE(review): garbled extraction of CommandBuffer::drawIndirectCountKHR and
// CommandBuffer::drawIndexedIndirectCountKHR (VK_KHR_draw_indirect_count).
// Signatures, the trailing maxDrawCount/stride arguments and braces were
// dropped — restore from the canonical generated header; do not hand-edit.
15459 template <
typename Dispatch>
15464 uint32_t maxDrawCount,
15469 d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
15470 static_cast<VkBuffer>( buffer ),
15471 static_cast<VkDeviceSize>(
offset ),
15472 static_cast<VkBuffer>( countBuffer ),
15473 static_cast<VkDeviceSize>( countBufferOffset ),
15478 template <
typename Dispatch>
15483 uint32_t maxDrawCount,
15488 d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
15489 static_cast<VkBuffer>( buffer ),
15490 static_cast<VkDeviceSize>(
offset ),
15491 static_cast<VkBuffer>( countBuffer ),
15492 static_cast<VkDeviceSize>( countBufferOffset ),
// NOTE(review): garbled extraction of the two
// Device::getMemoryHostPointerPropertiesEXT overloads
// (VK_EXT_external_memory_host). Signatures, the pHostPointer argument in the
// dispatch calls, local declarations, resultCheck and braces were dropped —
// restore from the canonical generated header; do not hand-edit these fragments.
15499 template <
typename Dispatch>
15502 const void * pHostPointer,
15507 return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
15508 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
15510 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
15513 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15514 template <
typename Dispatch>
15517 const void * pHostPointer,
15518 Dispatch
const & d )
const
15523 VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device,
15524 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
15526 reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
15529 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
15535 template <
typename Dispatch>
15543 d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
15544 static_cast<VkPipelineStageFlagBits>( pipelineStage ),
15545 static_cast<VkBuffer>( dstBuffer ),
15546 static_cast<VkDeviceSize>( dstOffset ),
15552 template <
typename Dispatch>
15558 return static_cast<Result>(
15559 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
15562 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15563 template <
typename TimeDomainEXTAllocator,
typename Dispatch>
15569 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
15570 uint32_t timeDomainCount;
15574 result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount,
nullptr );
15575 if ( ( result ==
VK_SUCCESS ) && timeDomainCount )
15577 timeDomains.resize( timeDomainCount );
15579 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
15584 if ( timeDomainCount < timeDomains.size() )
15586 timeDomains.resize( timeDomainCount );
15591 template <
typename TimeDomainEXTAllocator,
15600 std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
15601 uint32_t timeDomainCount;
15605 result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount,
nullptr );
15606 if ( ( result ==
VK_SUCCESS ) && timeDomainCount )
15608 timeDomains.resize( timeDomainCount );
15610 d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
15615 if ( timeDomainCount < timeDomains.size() )
15617 timeDomains.resize( timeDomainCount );
15623 template <
typename Dispatch>
15626 uint64_t * pTimestamps,
15627 uint64_t * pMaxDeviation,
15631 return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
15632 m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
15635 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15636 template <
typename U
int64_tAllocator,
typename Dispatch>
15639 Dispatch
const & d )
const
15643 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>
data(
15644 std::piecewise_construct, std::forward_as_tuple( timestampInfos.
size() ), std::forward_as_tuple( 0 ) );
15645 std::vector<uint64_t, Uint64_tAllocator> & timestamps =
data.first;
15646 uint64_t & maxDeviation =
data.second;
15647 VkResult result = d.vkGetCalibratedTimestampsEXT(
15654 template <
typename Uint64_tAllocator,
15660 Uint64_tAllocator & uint64_tAllocator,
15661 Dispatch
const & d )
const
15665 std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>
data(
15666 std::piecewise_construct, std::forward_as_tuple( timestampInfos.
size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
15667 std::vector<uint64_t, Uint64_tAllocator> & timestamps =
data.first;
15668 uint64_t & maxDeviation =
data.second;
15669 VkResult result = d.vkGetCalibratedTimestampsEXT(
15676 template <
typename Dispatch>
15682 std::pair<uint64_t, uint64_t>
data;
15683 uint64_t & timestamp = data.first;
15684 uint64_t & maxDeviation = data.second;
15686 d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( ×tampInfo ), ×tamp, &maxDeviation );
15695 template <
typename Dispatch>
15699 d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
15702 template <
typename Dispatch>
15705 uint32_t drawCount,
15710 d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>(
offset ), drawCount,
stride );
15713 template <
typename Dispatch>
15718 uint32_t maxDrawCount,
15723 d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
15724 static_cast<VkBuffer>( buffer ),
15725 static_cast<VkDeviceSize>(
offset ),
15726 static_cast<VkBuffer>( countBuffer ),
15727 static_cast<VkDeviceSize>( countBufferOffset ),
// NOTE(review): garbled extraction of the two CommandBuffer::setExclusiveScissorNV
// overloads (VK_NV_scissor_exclusive; pointer-style and ArrayProxy enhanced).
// Signatures, braces and the closing #endif were dropped — restore from the
// canonical generated header; do not hand-edit these fragments.
15734 template <
typename Dispatch>
15736 uint32_t exclusiveScissorCount,
15741 d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
15744 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15745 template <
typename Dispatch>
15752 d.vkCmdSetExclusiveScissorNV(
15753 m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(),
reinterpret_cast<const VkRect2D *
>( exclusiveScissors.data() ) );
15759 template <
typename Dispatch>
15763 d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
15766 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15767 template <
typename Checkpo
intMarkerType,
typename Dispatch>
15772 d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
15776 template <
typename Dispatch>
15782 d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
15785 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15786 template <
typename Checkpo
intDataNVAllocator,
typename Dispatch>
15792 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
15793 uint32_t checkpointDataCount;
15794 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount,
nullptr );
15795 checkpointData.resize( checkpointDataCount );
15796 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
15799 if ( checkpointDataCount < checkpointData.size() )
15801 checkpointData.resize( checkpointDataCount );
15803 return checkpointData;
15806 template <
typename CheckpointDataNVAllocator,
15815 std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
15816 uint32_t checkpointDataCount;
15817 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount,
nullptr );
15818 checkpointData.resize( checkpointDataCount );
15819 d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
15822 if ( checkpointDataCount < checkpointData.size() )
15824 checkpointData.resize( checkpointDataCount );
15826 return checkpointData;
15832 template <
typename Dispatch>
15838 return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
15841 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15842 template <
typename Dispatch>
15849 VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
15856 template <
typename Dispatch>
15862 return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ),
timeout ) );
15865 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15866 template <
typename Dispatch>
15872 VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
15873 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
15881 template <
typename Dispatch>
15886 return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
15889 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15890 template <
typename Dispatch>
15896 VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
15905 template <
typename Dispatch>
15910 return static_cast<Result>(
15911 d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
15914 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15915 template <
typename Dispatch>
15921 VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
15928 template <
typename Dispatch>
15932 d.vkUninitializePerformanceApiINTEL( m_device );
15935 template <
typename Dispatch>
15940 return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
// --- VK_INTEL_performance_query wrappers -------------------------------------
// NOTE(review): this excerpt is garbled — the embedded original line numbers
// jump (e.g. 15944 -> 15950), so signatures, asserts, returns and closing
// braces are missing. Code below is kept byte-identical; comments only.
// CommandBuffer::setPerformanceMarkerINTEL (enhanced): takes markerInfo by
// reference and forwards to vkCmdSetPerformanceMarkerINTEL via dispatcher d.
15943 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15944 template <
typename Dispatch>
15950 VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
// CommandBuffer::setPerformanceStreamMarkerINTEL, pointer variant: raw
// pass-through returning static_cast<Result>.
15957 template <
typename Dispatch>
15962 return static_cast<Result>(
15963 d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
// Enhanced variant of the same call (reference parameter).
15966 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15967 template <
typename Dispatch>
15973 VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
// CommandBuffer::setPerformanceOverrideINTEL: pointer variant, then enhanced.
15980 template <
typename Dispatch>
15985 return static_cast<Result>(
15986 d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
15989 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
15990 template <
typename Dispatch>
15996 VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
// Device::acquirePerformanceConfigurationINTEL, pointer variant: writes the
// acquired handle through pConfiguration.
16003 template <
typename Dispatch>
16010 return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
16011 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
16012 reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
// Enhanced variant: local handle, wrapped into the return value via
// createResultValueType.
16015 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16016 template <
typename Dispatch>
16023 VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
16024 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
16025 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
16028 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), configuration );
// Unique-handle variant (guarded by VULKAN_HPP_NO_SMART_HANDLE).
16031 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16032 template <
typename Dispatch>
16035 Dispatch
const & d )
const
16040 VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
16041 reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
16042 reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
16046 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Device::releasePerformanceConfigurationINTEL: raw variant is only compiled
// when enhanced mode is disabled (#ifdef), the checked variant follows.
// NOTE(review): the release wrapper appears twice below — presumably the
// second is the overloaded `release( PerformanceConfigurationINTEL )` spelling
// generated alongside it; confirm against the full generated file.
16052 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16053 template <
typename Dispatch>
16058 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
16061 template <
typename Dispatch>
16067 VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
16074 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16075 template <
typename Dispatch>
16080 return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
16083 template <
typename Dispatch>
16089 VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
// Queue::setPerformanceConfigurationINTEL: raw (#ifdef) and checked variants.
16096 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16097 template <
typename Dispatch>
16102 return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
16105 template <
typename Dispatch>
16111 VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
// Device::getPerformanceParameterINTEL: pointer variant then enhanced variant
// returning the queried PerformanceValueINTEL.
16118 template <
typename Dispatch>
16124 return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
16125 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
16128 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16129 template <
typename Dispatch>
16136 VkResult result = d.vkGetPerformanceParameterINTEL(
16137 m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
// Device::setLocalDimmingAMD (VK_AMD_display_native_hdr): void pass-through.
16146 template <
typename Dispatch>
16152 d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
// --- Platform surface creation (FUCHSIA, Metal) ------------------------------
// NOTE(review): lines are missing from this excerpt (embedded numbering jumps);
// code kept byte-identical, comments only.
// Instance::createImagePipeSurfaceFUCHSIA, pointer variant: raw pass-through.
16155 #if defined( VK_USE_PLATFORM_FUCHSIA )
16158 template <
typename Dispatch>
16160 Instance::createImagePipeSurfaceFUCHSIA(
const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
16166 return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
16167 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
16168 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16169 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
// Enhanced variant: reference create-info, Optional allocator (nullptr when
// not supplied, via the double cast below), local SurfaceKHR out-handle.
16172 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16173 template <
typename Dispatch>
16175 Instance::createImagePipeSurfaceFUCHSIA(
const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
16176 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16177 Dispatch
const & d )
const
16182 VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
16184 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
16185 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16186 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
// Unique-handle variant: wraps the surface in UniqueHandle with an
// ObjectDestroy deleter bound to this Instance.
16192 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16193 template <
typename Dispatch>
16195 Instance::createImagePipeSurfaceFUCHSIAUnique(
const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
16196 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16197 Dispatch
const & d )
const
16202 VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
16204 reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
16205 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16206 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16210 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16211 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// Instance::createMetalSurfaceEXT: same three-variant pattern for Metal.
16217 #if defined( VK_USE_PLATFORM_METAL_EXT )
16220 template <
typename Dispatch>
16227 return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
16228 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
16229 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16230 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
16233 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16234 template <
typename Dispatch>
16236 Instance::createMetalSurfaceEXT(
const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
16237 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16238 Dispatch
const & d )
const
16244 d.vkCreateMetalSurfaceEXT( m_instance,
16245 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
16246 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16247 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16253 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16254 template <
typename Dispatch>
16256 Instance::createMetalSurfaceEXTUnique(
const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
16257 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
16258 Dispatch
const & d )
const
16264 d.vkCreateMetalSurfaceEXT( m_instance,
16265 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
16266 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16267 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16271 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16272 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
// --- VK_KHR_fragment_shading_rate + VK_EXT_buffer_device_address -------------
// NOTE(review): interior lines are missing from this excerpt; code is kept
// byte-identical, comments only.
// PhysicalDevice::getFragmentShadingRatesKHR, pointer variant.
16280 template <
typename Dispatch>
16287 return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
16288 m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
// Enhanced variant: classic two-call enumeration — query the count with a
// nullptr array, resize, then fill; shrink if fewer were written.
16291 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16292 template <
typename PhysicalDeviceFragmentShadingRateKHRAllocator,
typename Dispatch>
16299 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
16300 uint32_t fragmentShadingRateCount;
16304 result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount,
nullptr );
16305 if ( ( result ==
VK_SUCCESS ) && fragmentShadingRateCount )
16307 fragmentShadingRates.resize( fragmentShadingRateCount );
16308 result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
16309 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
16314 if ( fragmentShadingRateCount < fragmentShadingRates.size() )
16316 fragmentShadingRates.resize( fragmentShadingRateCount );
16318 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
// Overload taking a caller-supplied allocator; identical enumeration loop.
16321 template <
typename PhysicalDeviceFragmentShadingRateKHRAllocator,
16328 Dispatch
const & d )
const
16332 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
16333 physicalDeviceFragmentShadingRateKHRAllocator );
16334 uint32_t fragmentShadingRateCount;
16338 result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount,
nullptr );
16339 if ( ( result ==
VK_SUCCESS ) && fragmentShadingRateCount )
16341 fragmentShadingRates.resize( fragmentShadingRateCount );
16342 result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
16343 m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
16348 if ( fragmentShadingRateCount < fragmentShadingRates.size() )
16350 fragmentShadingRates.resize( fragmentShadingRateCount );
16352 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
// CommandBuffer::setFragmentShadingRateKHR: pointer then reference variants;
// combinerOps is passed through as a 2-element array in both.
16356 template <
typename Dispatch>
16362 d.vkCmdSetFragmentShadingRateKHR(
16363 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
16366 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16367 template <
typename Dispatch>
16374 d.vkCmdSetFragmentShadingRateKHR(
16375 m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
// Device::getBufferAddressEXT: returns a DeviceAddress directly (no Result).
16381 template <
typename Dispatch>
16386 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
16389 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16390 template <
typename Dispatch>
16396 VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
// --- VK_EXT_tooling_info + VK_KHR_present_wait -------------------------------
// NOTE(review): excerpt is missing interior lines; code kept byte-identical.
// PhysicalDevice::getToolPropertiesEXT, pointer variant.
16404 template <
typename Dispatch>
16410 return static_cast<Result>(
16411 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
// Enhanced variant: two-call enumeration (count, resize, fill, shrink).
16414 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16415 template <
typename PhysicalDeviceToolPropertiesAllocator,
typename Dispatch>
16422 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
16423 uint32_t toolCount;
16427 result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount,
nullptr );
16428 if ( ( result ==
VK_SUCCESS ) && toolCount )
16430 toolProperties.resize( toolCount );
16432 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
16437 if ( toolCount < toolProperties.size() )
16439 toolProperties.resize( toolCount );
16441 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
// Overload with caller-supplied allocator; same enumeration pattern.
16444 template <
typename PhysicalDeviceToolPropertiesAllocator,
16454 std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
16455 physicalDeviceToolPropertiesAllocator );
16456 uint32_t toolCount;
16460 result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount,
nullptr );
16461 if ( ( result ==
VK_SUCCESS ) && toolCount )
16463 toolProperties.resize( toolCount );
16465 d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
16470 if ( toolCount < toolProperties.size() )
16472 toolProperties.resize( toolCount );
16474 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
// Device::waitForPresentKHR: raw variant only when enhanced mode is disabled
// (#ifdef), then the checked variant that calls resultCheck.
16480 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16481 template <
typename Dispatch>
16483 uint64_t presentId,
16488 return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId,
timeout ) );
16491 template <
typename Dispatch>
16497 VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
16498 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// --- VK_NV_cooperative_matrix + VK_NV_coverage_reduction_mode ----------------
// NOTE(review): excerpt is missing interior lines; code kept byte-identical.
// PhysicalDevice::getCooperativeMatrixPropertiesNV, pointer variant.
16508 template <
typename Dispatch>
16513 return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
16514 m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
// Enhanced variant: two-call enumeration (count, resize, fill, shrink).
16517 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16518 template <
typename CooperativeMatrixPropertiesNVAllocator,
typename Dispatch>
16525 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
16526 uint32_t propertyCount;
16530 result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount,
nullptr );
16531 if ( ( result ==
VK_SUCCESS ) && propertyCount )
16533 properties.resize( propertyCount );
16534 result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
16535 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
16540 if ( propertyCount < properties.size() )
16542 properties.resize( propertyCount );
// Overload with caller-supplied allocator; same enumeration pattern.
16547 template <
typename CooperativeMatrixPropertiesNVAllocator,
16554 Dispatch
const & d )
const
16558 std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
16559 cooperativeMatrixPropertiesNVAllocator );
16560 uint32_t propertyCount;
16564 result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount,
nullptr );
16565 if ( ( result ==
VK_SUCCESS ) && propertyCount )
16567 properties.resize( propertyCount );
16568 result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
16569 m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
16574 if ( propertyCount < properties.size() )
16576 properties.resize( propertyCount );
// PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV: pointer
// variant, then two enhanced enumeration overloads (default / custom
// allocator) with explicit resultCheck calls.
16584 template <
typename Dispatch>
16589 return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
16590 m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
16593 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16594 template <
typename FramebufferMixedSamplesCombinationNVAllocator,
typename Dispatch>
16601 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
16602 uint32_t combinationCount;
16606 result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount,
nullptr );
16607 if ( ( result ==
VK_SUCCESS ) && combinationCount )
16609 combinations.resize( combinationCount );
16610 result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
16611 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
16614 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16617 if ( combinationCount < combinations.size() )
16619 combinations.resize( combinationCount );
16624 template <
typename FramebufferMixedSamplesCombinationNVAllocator,
16631 FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch
const & d )
const
16635 std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
16636 framebufferMixedSamplesCombinationNVAllocator );
16637 uint32_t combinationCount;
16641 result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount,
nullptr );
16642 if ( ( result ==
VK_SUCCESS ) && combinationCount )
16644 combinations.resize( combinationCount );
16645 result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
16646 m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
16649 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
16652 if ( combinationCount < combinations.size() )
16654 combinations.resize( combinationCount );
// --- VK_EXT_full_screen_exclusive (Win32-only section) -----------------------
// NOTE(review): excerpt is missing interior lines; code kept byte-identical.
// PhysicalDevice::getSurfacePresentModes2EXT, pointer variant.
16660 #if defined( VK_USE_PLATFORM_WIN32_KHR )
16663 template <
typename Dispatch>
16666 uint32_t * pPresentModeCount,
16671 return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
16672 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
16674 reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
// Enhanced variant: two-call enumeration keyed on surfaceInfo.
16677 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16678 template <
typename PresentModeKHRAllocator,
typename Dispatch>
16684 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
16685 uint32_t presentModeCount;
16689 result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
16690 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount,
nullptr );
16691 if ( ( result ==
VK_SUCCESS ) && presentModeCount )
16693 presentModes.resize( presentModeCount );
16694 result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
16695 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
16697 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
16702 if ( presentModeCount < presentModes.size() )
16704 presentModes.resize( presentModeCount );
// Overload with caller-supplied allocator; same enumeration pattern.
16709 template <
typename PresentModeKHRAllocator,
16715 PresentModeKHRAllocator & presentModeKHRAllocator,
16716 Dispatch
const & d )
const
16720 std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
16721 uint32_t presentModeCount;
16725 result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
16726 m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount,
nullptr );
16727 if ( ( result ==
VK_SUCCESS ) && presentModeCount )
16729 presentModes.resize( presentModeCount );
16730 result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
16731 reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
16733 reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
16738 if ( presentModeCount < presentModes.size() )
16740 presentModes.resize( presentModeCount );
// Device::acquireFullScreenExclusiveModeEXT: raw variant compiled only when
// enhanced mode is disabled (#ifdef), then the checked variant.
16746 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16747 template <
typename Dispatch>
16752 return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
16755 template <
typename Dispatch>
16761 VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
// Device::releaseFullScreenExclusiveModeEXT: same raw/checked pairing.
16768 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16769 template <
typename Dispatch>
16774 return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
16777 template <
typename Dispatch>
16783 VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
// Device::getGroupSurfacePresentModes2EXT: pointer and enhanced variants
// writing a DeviceGroupPresentModeFlagsKHR out-parameter.
16790 template <
typename Dispatch>
16797 return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
16798 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
16801 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16802 template <
typename Dispatch>
16809 VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT(
16810 m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
// --- VK_EXT_headless_surface + VK_KHR_buffer_device_address ------------------
// NOTE(review): excerpt is missing interior lines; code kept byte-identical.
// Instance::createHeadlessSurfaceEXT: pointer, enhanced, and Unique variants.
16820 template <
typename Dispatch>
16827 return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
16828 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
16829 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
16830 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
16833 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16834 template <
typename Dispatch>
16838 Dispatch
const & d )
const
16843 VkResult result = d.vkCreateHeadlessSurfaceEXT(
16845 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
16846 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16847 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16853 # ifndef VULKAN_HPP_NO_SMART_HANDLE
16854 template <
typename Dispatch>
16858 Dispatch
const & d )
const
16863 VkResult result = d.vkCreateHeadlessSurfaceEXT(
16865 reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
16866 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
16867 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
16871 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Device::getBufferAddressKHR: returns a DeviceAddress directly.
16879 template <
typename Dispatch>
16884 return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
16887 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16888 template <
typename Dispatch>
16894 VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
// Device::getBufferOpaqueCaptureAddressKHR: returns a uint64_t capture
// address; pointer then reference variants.
16900 template <
typename Dispatch>
16905 return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
16908 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16909 template <
typename Dispatch>
16915 uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
// Device::getMemoryOpaqueCaptureAddressKHR: same pattern for device memory.
16921 template <
typename Dispatch>
16926 return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
16929 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16930 template <
typename Dispatch>
16936 uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
// --- VK_EXT_line_rasterization / host_query_reset / extended_dynamic_state ---
// NOTE(review): excerpt is missing interior lines; code kept byte-identical.
// These are all void command-buffer/device setters that forward one-to-one to
// the corresponding vkCmd*/vk* entry point through dispatcher d.
16944 template <
typename Dispatch>
16949 d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
// Device::resetQueryPoolEXT (host query reset).
16954 template <
typename Dispatch>
16956 uint32_t firstQuery,
16957 uint32_t queryCount,
16961 d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
// Extended dynamic state: one setter per piece of pipeline state.
16966 template <
typename Dispatch>
16970 d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
16973 template <
typename Dispatch>
16977 d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
16980 template <
typename Dispatch>
16985 d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
// setViewportWithCountEXT: pointer variant, then ArrayProxy variant that
// derives the count from viewports.size().
16988 template <
typename Dispatch>
16994 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
16997 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16998 template <
typename Dispatch>
17004 d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(),
reinterpret_cast<const VkViewport *
>( viewports.data() ) );
// setScissorWithCountEXT: same pointer/ArrayProxy pairing.
17008 template <
typename Dispatch>
17013 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
17016 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17017 template <
typename Dispatch>
17023 d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(),
reinterpret_cast<const VkRect2D *
>( scissors.data() ) );
// bindVertexBuffers2EXT: pointer variant forwards the four parallel arrays
// (buffers / offsets / sizes / strides) unchanged.
17027 template <
typename Dispatch>
17029 uint32_t bindingCount,
17037 d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
17040 reinterpret_cast<const VkBuffer *>( pBuffers ),
17041 reinterpret_cast<const VkDeviceSize *>( pOffsets ),
17042 reinterpret_cast<const VkDeviceSize *>( pSizes ),
17043 reinterpret_cast<const VkDeviceSize *>( pStrides ) );
// ArrayProxy variant; the VULKAN_HPP_NO_EXCEPTIONS branch presumably guards
// the size-consistency asserts dropped from this excerpt — confirm in the
// full generated file.
17046 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17047 template <
typename Dispatch>
17056 # ifdef VULKAN_HPP_NO_EXCEPTIONS
17075 d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
17078 reinterpret_cast<const VkBuffer *
>(
buffers.data() ),
17079 reinterpret_cast<const VkDeviceSize *>(
offsets.data() ),
17080 reinterpret_cast<const VkDeviceSize *>(
sizes.data() ),
17081 reinterpret_cast<const VkDeviceSize *>(
strides.data() ) );
// Remaining extended-dynamic-state setters: depth test/write/compare/bounds,
// stencil test, and the five-argument stencil-op state.
17085 template <
typename Dispatch>
17089 d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
17092 template <
typename Dispatch>
17096 d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
17099 template <
typename Dispatch>
17103 d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
17106 template <
typename Dispatch>
17111 d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
17114 template <
typename Dispatch>
17118 d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
17121 template <
typename Dispatch>
17130 d.vkCmdSetStencilOpEXT( m_commandBuffer,
17131 static_cast<VkStencilFaceFlags>( faceMask ),
17132 static_cast<VkStencilOp>( failOp ),
17133 static_cast<VkStencilOp>( passOp ),
17134 static_cast<VkStencilOp>( depthFailOp ),
17135 static_cast<VkCompareOp>( compareOp ) );
// --- VK_KHR_deferred_host_operations -----------------------------------------
// NOTE(review): excerpt is missing interior lines; code kept byte-identical.
// Device::createDeferredOperationKHR: pointer, enhanced, and Unique variants.
17140 template <
typename Dispatch>
17146 return static_cast<Result>( d.vkCreateDeferredOperationKHR(
17147 m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
17150 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17151 template <
typename Dispatch>
17158 VkResult result = d.vkCreateDeferredOperationKHR(
17160 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17161 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
17164 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
17167 # ifndef VULKAN_HPP_NO_SMART_HANDLE
17168 template <
typename Dispatch>
17175 VkResult result = d.vkCreateDeferredOperationKHR(
17177 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17178 reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
17182 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
// Device::destroyDeferredOperationKHR: pointer and Optional-allocator
// variants; the pair appears twice — presumably the second is the generic
// destroy( DeferredOperationKHR ) overload; confirm in the full file.
17188 template <
typename Dispatch>
17194 d.vkDestroyDeferredOperationKHR(
17195 m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17198 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17199 template <
typename Dispatch>
17206 d.vkDestroyDeferredOperationKHR(
17208 static_cast<VkDeferredOperationKHR>( operation ),
17209 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17213 template <
typename Dispatch>
17219 d.vkDestroyDeferredOperationKHR(
17220 m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17223 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17224 template <
typename Dispatch>
17231 d.vkDestroyDeferredOperationKHR(
17233 static_cast<VkDeferredOperationKHR>( operation ),
17234 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
// Device::getDeferredOperationMaxConcurrencyKHR: plain uint32_t query.
17238 template <
typename Dispatch>
17243 return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
// Device::getDeferredOperationResultKHR: raw (#ifdef) and enhanced variants.
17246 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17247 template <
typename Dispatch>
17252 return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
17255 template <
typename Dispatch>
17261 VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
// Device::deferredOperationJoinKHR: raw (#ifdef) then checked via resultCheck.
17267 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17268 template <
typename Dispatch>
17273 return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
17276 template <
typename Dispatch>
17278 Dispatch
const & d )
const
17282 VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
17283 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17293 template <
typename Dispatch>
17295 uint32_t * pExecutableCount,
17300 return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
17301 reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
17303 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
17306 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17307 template <
typename PipelineExecutablePropertiesKHRAllocator,
typename Dispatch>
17314 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
17315 uint32_t executableCount;
17319 result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount,
nullptr );
17320 if ( ( result ==
VK_SUCCESS ) && executableCount )
17322 properties.resize( executableCount );
17323 result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
17324 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
17326 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
17331 if ( executableCount < properties.size() )
17333 properties.resize( executableCount );
17338 template <
typename PipelineExecutablePropertiesKHRAllocator,
17345 PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
17346 Dispatch
const & d )
const
17350 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
17351 pipelineExecutablePropertiesKHRAllocator );
17352 uint32_t executableCount;
17356 result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount,
nullptr );
17357 if ( ( result ==
VK_SUCCESS ) && executableCount )
17359 properties.resize( executableCount );
17360 result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
17361 reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
17363 reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
17368 if ( executableCount < properties.size() )
17370 properties.resize( executableCount );
17376 template <
typename Dispatch>
17379 uint32_t * pStatisticCount,
17384 return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
17385 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
17387 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
17390 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17391 template <
typename PipelineExecutableStatisticKHRAllocator,
typename Dispatch>
17398 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>
statistics;
17399 uint32_t statisticCount;
17404 d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount,
nullptr );
17405 if ( ( result ==
VK_SUCCESS ) && statisticCount )
17407 statistics.resize( statisticCount );
17408 result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
17409 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17411 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
17416 if ( statisticCount < statistics.size() )
17418 statistics.resize( statisticCount );
// NOTE(review): fragment of the allocator-taking overload — identical
// enumeration logic to the preceding overload, except the result vector is
// constructed with the caller-supplied PipelineExecutableStatisticKHRAllocator.
// Signature, loop scaffolding and return lines are missing from this chunk.
17423 template <
typename PipelineExecutableStatisticKHRAllocator,
17430 PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
17431 Dispatch
const & d )
const
17435 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>
statistics(
17436 pipelineExecutableStatisticKHRAllocator );
17437 uint32_t statisticCount;
// First call: count only (null output array).
17442 d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount,
nullptr );
17443 if ( ( result ==
VK_SUCCESS ) && statisticCount )
17445 statistics.resize( statisticCount );
17446 result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
17447 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17449 reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
17454 if ( statisticCount < statistics.size() )
17456 statistics.resize( statisticCount );
// NOTE(review): pointer-style wrapper fragment — forwards to
// vkGetPipelineExecutableInternalRepresentationsKHR; the declaring line and
// closing braces are missing from this extraction.
17462 template <
typename Dispatch>
17465 uint32_t * pInternalRepresentationCount,
17470 return static_cast<Result>(
17471 d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
17472 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
17473 pInternalRepresentationCount,
17474 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
17477 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): enhanced-mode overload fragment returning a vector of
// PipelineExecutableInternalRepresentationKHR via the usual two-call
// enumeration. The signature and loop scaffolding are missing; the trailing
// resultCheck line between the assert and the return is also absent.
17478 template <
typename PipelineExecutableInternalRepresentationKHRAllocator,
typename Dispatch>
17480 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>
::type
17485 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
17486 internalRepresentations;
17487 uint32_t internalRepresentationCount;
// First call: count only.
17491 result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17492 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount,
nullptr );
17493 if ( ( result ==
VK_SUCCESS ) && internalRepresentationCount )
17495 internalRepresentations.resize( internalRepresentationCount );
17496 result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17498 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17499 &internalRepresentationCount,
17500 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
17504 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
17505 if ( internalRepresentationCount < internalRepresentations.size() )
17507 internalRepresentations.resize( internalRepresentationCount );
17509 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
// NOTE(review): allocator-taking overload fragment — same enumeration logic
// as the preceding overload, with the vector constructed from the supplied
// PipelineExecutableInternalRepresentationKHRAllocator. Signature and loop
// scaffolding are missing from this extraction.
17512 template <
typename PipelineExecutableInternalRepresentationKHRAllocator,
17517 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>
::type
17520 PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
17521 Dispatch
const & d )
const
17525 std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
17526 internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
17527 uint32_t internalRepresentationCount;
17531 result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17532 m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount,
nullptr );
17533 if ( ( result ==
VK_SUCCESS ) && internalRepresentationCount )
17535 internalRepresentations.resize( internalRepresentationCount );
17536 result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
17538 reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
17539 &internalRepresentationCount,
17540 reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
17544 VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
17545 if ( internalRepresentationCount < internalRepresentations.size() )
17547 internalRepresentations.resize( internalRepresentationCount );
17549 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
// NOTE(review): pointer-style wrapper fragment (VK_NV_device_generated_commands)
// forwarding to vkGetGeneratedCommandsMemoryRequirementsNV; declaring line and
// braces missing from this extraction.
17555 template <
typename Dispatch>
17561 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
17562 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
17563 reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
17566 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): enhanced-mode fragment returning MemoryRequirements2 by value;
// the signature and the local `memoryRequirements` declaration are missing.
17567 template <
typename Dispatch>
17575 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
17576 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
17577 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
17579 return memoryRequirements;
// NOTE(review): StructureChain-returning overload fragment — the lines that
// declare `structureChain` and bind `memoryRequirements` out of it are missing
// from this extraction; only the dispatcher call and return survive.
17582 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
17591 d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
17592 reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
17593 reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
17595 return structureChain;
// NOTE(review): pointer-style CommandBuffer wrapper fragment forwarding to
// vkCmdPreprocessGeneratedCommandsNV; declaring line missing from this chunk.
17599 template <
typename Dispatch>
17604 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
17607 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): reference-taking enhanced overload fragment; signature missing.
17608 template <
typename Dispatch>
17614 d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
// NOTE(review): pointer-style wrapper fragment forwarding to
// vkCmdExecuteGeneratedCommandsNV; declaring line missing from this chunk.
17618 template <
typename Dispatch>
17624 d.vkCmdExecuteGeneratedCommandsNV(
17625 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
17628 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): reference-taking enhanced overload fragment; signature missing.
17629 template <
typename Dispatch>
17636 d.vkCmdExecuteGeneratedCommandsNV(
17637 m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
// NOTE(review): wrapper fragment forwarding to vkCmdBindPipelineShaderGroupNV;
// declaring line missing — only one parameter line and the call survive.
17641 template <
typename Dispatch>
17644 uint32_t groupIndex,
17648 d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
// NOTE(review): pointer-style create wrapper fragment forwarding to
// vkCreateIndirectCommandsLayoutNV; declaring line and braces missing.
17651 template <
typename Dispatch>
17659 return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
17660 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
17661 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17662 reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
17665 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): enhanced-mode create fragment — the signature, the local
// `indirectCommandsLayout` declaration, the `m_device` argument line, and the
// resultCheck line are missing from this extraction. The optional allocator
// is converted to a raw VkAllocationCallbacks* (null when not supplied).
17666 template <
typename Dispatch>
17670 Dispatch
const & d )
const
17675 VkResult result = d.vkCreateIndirectCommandsLayoutNV(
17677 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
17678 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17679 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
17682 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
17685 # ifndef VULKAN_HPP_NO_SMART_HANDLE
17686 template <
typename Dispatch>
17690 Dispatch
const & d )
const
17695 VkResult result = d.vkCreateIndirectCommandsLayoutNV(
17697 reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
17698 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17699 reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
17709 template <
typename Dispatch>
17715 d.vkDestroyIndirectCommandsLayoutNV(
17716 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17719 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17720 template <
typename Dispatch>
17727 d.vkDestroyIndirectCommandsLayoutNV(
17729 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
17730 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17734 template <
typename Dispatch>
17740 d.vkDestroyIndirectCommandsLayoutNV(
17741 m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17744 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17745 template <
typename Dispatch>
17752 d.vkDestroyIndirectCommandsLayoutNV(
17754 static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
17755 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17761 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17762 template <
typename Dispatch>
17768 return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
17771 template <
typename Dispatch>
17777 VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) );
17784 template <
typename Dispatch>
17786 uint32_t connectorId,
17791 return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
17794 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17795 template <
typename Dispatch>
17802 VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
17808 # ifndef VULKAN_HPP_NO_SMART_HANDLE
17809 template <
typename Dispatch>
17816 VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
17827 template <
typename Dispatch>
17834 return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
17835 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
17836 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
17837 reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
17840 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17841 template <
typename Dispatch>
17845 Dispatch
const & d )
const
17850 VkResult result = d.vkCreatePrivateDataSlotEXT(
17852 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
17853 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17854 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
17857 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
17860 # ifndef VULKAN_HPP_NO_SMART_HANDLE
17861 template <
typename Dispatch>
17865 Dispatch
const & d )
const
17870 VkResult result = d.vkCreatePrivateDataSlotEXT(
17872 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
17873 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
17874 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
17878 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
17884 template <
typename Dispatch>
17890 d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
17893 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17894 template <
typename Dispatch>
17901 d.vkDestroyPrivateDataSlotEXT(
17903 static_cast<VkPrivateDataSlot>( privateDataSlot ),
17904 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
17908 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17909 template <
typename Dispatch>
17911 uint64_t objectHandle,
17917 return static_cast<Result>(
17918 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ),
data ) );
17921 template <
typename Dispatch>
17923 uint64_t objectHandle,
17926 Dispatch
const & d )
const
17931 d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
17938 template <
typename Dispatch>
17940 uint64_t objectHandle,
17946 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
17949 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17950 template <
typename Dispatch>
17952 uint64_t objectHandle,
17959 d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
17965 #if defined( VK_ENABLE_BETA_EXTENSIONS )
17968 template <
typename Dispatch>
17969 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR(
const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
17973 d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
17976 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17977 template <
typename Dispatch>
17978 VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR(
const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
17983 d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
17988 #if defined( VK_USE_PLATFORM_METAL_EXT )
17991 template <
typename Dispatch>
17992 VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
17996 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
17999 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18000 template <
typename Dispatch>
18006 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
18007 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
18009 return metalObjectsInfo;
18012 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
18017 StructureChain<
X,
Y,
Z...> structureChain;
18018 VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
18019 d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
18021 return structureChain;
18028 template <
typename Dispatch>
18034 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>(
event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
18037 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18038 template <
typename Dispatch>
18045 d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>(
event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
18049 template <
typename Dispatch>
18055 d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>(
event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
18058 template <
typename Dispatch>
18065 d.vkCmdWaitEvents2KHR(
18066 m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
18069 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18070 template <
typename Dispatch>
18076 # ifdef VULKAN_HPP_NO_EXCEPTIONS
18079 if ( events.size() != dependencyInfos.size() )
18085 d.vkCmdWaitEvents2KHR( m_commandBuffer,
18087 reinterpret_cast<const VkEvent *
>( events.data() ),
18088 reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
18092 template <
typename Dispatch>
18097 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
18100 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18101 template <
typename Dispatch>
18107 d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
18111 template <
typename Dispatch>
18118 d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ),
query );
18121 template <
typename Dispatch>
18128 return static_cast<Result>(
18129 d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
18132 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18133 template <
typename Dispatch>
18139 VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.
size(),
reinterpret_cast<const VkSubmitInfo2 *
>( submits.
data() ), static_cast<VkFence>( fence ) );
18146 template <
typename Dispatch>
18154 d.vkCmdWriteBufferMarker2AMD(
18155 m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
18158 template <
typename Dispatch>
18164 d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
18167 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode Queue::getCheckpointData2NV fragment: two-call enumeration of
// VkCheckpointData2NV records (count query, resize, fetch, shrink-to-fit).
// FIX: the template parameter identifier was broken mid-token across two lines
// ("Checkpo" / "intData2NVAllocator"), which cannot compile and contradicts its
// use as CheckpointData2NVAllocator two lines below; rejoined into one token.
18168 template <
typename CheckpointData2NVAllocator,
typename Dispatch>
18174 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
18175 uint32_t checkpointDataCount;
// First call: count only (null output array).
18176 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount,
nullptr );
18177 checkpointData.resize( checkpointDataCount );
18178 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
// Second call may legitimately return fewer records than first reported.
18181 if ( checkpointDataCount < checkpointData.size() )
18183 checkpointData.resize( checkpointDataCount );
18185 return checkpointData;
18188 template <
typename CheckpointData2NVAllocator,
18197 std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
18198 uint32_t checkpointDataCount;
18199 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount,
nullptr );
18200 checkpointData.resize( checkpointDataCount );
18201 d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
18204 if ( checkpointDataCount < checkpointData.size() )
18206 checkpointData.resize( checkpointDataCount );
18208 return checkpointData;
18214 template <
typename Dispatch>
18220 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
18223 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18224 template <
typename Dispatch>
18231 d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
18233 return layoutSizeInBytes;
18237 template <
typename Dispatch>
18244 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
18247 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18248 template <
typename Dispatch>
18255 d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
18261 template <
typename Dispatch>
18264 void * pDescriptor,
18268 d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
18271 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18272 template <
typename DescriptorType,
typename Dispatch>
18279 d.vkGetDescriptorEXT(
18280 m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ),
sizeof(
DescriptorType ), reinterpret_cast<void *>( &descriptor ) );
18286 template <
typename Dispatch>
18292 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
18295 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18296 template <
typename Dispatch>
18303 d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(),
reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *
>( bindingInfos.data() ) );
18307 template <
typename Dispatch>
18312 const uint32_t * pBufferIndices,
18317 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
18318 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
18319 static_cast<VkPipelineLayout>( layout ),
18323 reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
18326 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18327 template <
typename Dispatch>
18336 # ifdef VULKAN_HPP_NO_EXCEPTIONS
18339 if ( bufferIndices.size() !=
offsets.size() )
18345 d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
18346 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
18347 static_cast<VkPipelineLayout>( layout ),
18349 bufferIndices.size(),
18350 bufferIndices.data(),
18355 template <
typename Dispatch>
18362 d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
18363 m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
18366 template <
typename Dispatch>
18371 return static_cast<Result>(
18372 d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18375 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18376 template <
typename DataType,
typename Dispatch>
18383 VkResult result = d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data );
18390 template <
typename Dispatch>
18395 return static_cast<Result>(
18396 d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18400 template <
typename DataType,
typename Dispatch>
18407 VkResult result = d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data );
18414 template <
typename Dispatch>
18419 return static_cast<Result>(
18420 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18423 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18424 template <
typename DataType,
typename Dispatch>
18432 d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data );
18439 template <
typename Dispatch>
18444 return static_cast<Result>(
18445 d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18448 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18449 template <
typename DataType,
typename Dispatch>
18456 VkResult result = d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data );
18463 template <
typename Dispatch>
18468 return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
18469 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
18472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18473 template <
typename DataType,
typename Dispatch>
18476 Dispatch
const & d )
const
18481 VkResult result = d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
18482 m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data );
18483 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18492 template <
typename Dispatch>
18498 d.vkCmdSetFragmentShadingRateEnumNV(
18499 m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
18504 template <
typename Dispatch>
18509 d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
18512 template <
typename Dispatch>
18515 uint32_t drawCount,
18520 d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>(
offset ), drawCount,
stride );
18523 template <
typename Dispatch>
18528 uint32_t maxDrawCount,
18533 d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
18534 static_cast<VkBuffer>( buffer ),
18535 static_cast<VkDeviceSize>(
offset ),
18536 static_cast<VkBuffer>( countBuffer ),
18537 static_cast<VkDeviceSize>( countBufferOffset ),
// NOTE(review): pointer-style wrapper fragment (VK_KHR_copy_commands2)
// forwarding to vkCmdCopyBuffer2KHR; declaring line missing from this chunk.
18544 template <
typename Dispatch>
18549 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
18552 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode CommandBuffer::copyBuffer2KHR fragment (reference-taking).
// FIX: "©BufferInfo" was mojibake — the "&copy" prefix of "&copyBufferInfo"
// was HTML-entity-decoded to "©"; restore the address-of on the parameter.
18553 template <
typename Dispatch>
18559 d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
// NOTE(review): pointer-style wrapper fragment forwarding to vkCmdCopyImage2KHR;
// declaring line missing from this extraction.
18563 template <
typename Dispatch>
18568 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
18571 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode CommandBuffer::copyImage2KHR fragment (reference-taking).
// FIX: "©ImageInfo" was mojibake for "&copyImageInfo" ("&copy" decoded as the
// © HTML entity); restore the address-of expression.
18572 template <
typename Dispatch>
18578 d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
// NOTE(review): pointer-style wrapper fragment forwarding to
// vkCmdCopyBufferToImage2KHR; declaring line missing from this extraction.
18582 template <
typename Dispatch>
18587 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
18590 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode CommandBuffer::copyBufferToImage2KHR fragment.
// FIX: "©BufferToImageInfo" was mojibake for "&copyBufferToImageInfo"
// ("&copy" decoded as the © HTML entity); restore the address-of expression.
18591 template <
typename Dispatch>
18597 d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
// NOTE(review): pointer-style wrapper fragment forwarding to
// vkCmdCopyImageToBuffer2KHR; declaring line missing from this extraction.
18601 template <
typename Dispatch>
18606 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
18609 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode CommandBuffer::copyImageToBuffer2KHR fragment.
// FIX: "©ImageToBufferInfo" was mojibake for "&copyImageToBufferInfo"
// ("&copy" decoded as the © HTML entity); restore the address-of expression.
18610 template <
typename Dispatch>
18616 d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
// NOTE(review): pointer-style wrapper fragment forwarding to vkCmdBlitImage2KHR;
// declaring line missing from this extraction.
18620 template <
typename Dispatch>
18625 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
18628 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): reference-taking enhanced overload fragment; signature missing.
18629 template <
typename Dispatch>
18635 d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
// NOTE(review): pointer-style wrapper fragment forwarding to
// vkCmdResolveImage2KHR; declaring line missing from this extraction.
18639 template <
typename Dispatch>
18644 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
18647 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// NOTE(review): reference-taking enhanced overload fragment; signature missing.
18648 template <
typename Dispatch>
18654 d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
18660 template <
typename Dispatch>
18667 d.vkGetImageSubresourceLayout2EXT( m_device,
18668 static_cast<VkImage>(
image ),
18669 reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
18670 reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
18673 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18674 template <
typename Dispatch>
18681 d.vkGetImageSubresourceLayout2EXT( m_device,
18682 static_cast<VkImage>(
image ),
18683 reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
18684 reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
18689 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
18697 d.vkGetImageSubresourceLayout2EXT( m_device,
18698 static_cast<VkImage>(
image ),
18699 reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
18700 reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
18702 return structureChain;
18708 template <
typename Dispatch>
18714 return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
18715 m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
18718 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18719 template <
typename Dispatch>
18725 std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>
data;
18729 d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
18730 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18739 #if defined( VK_USE_PLATFORM_WIN32_KHR )
18742 # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18743 template <
typename Dispatch>
18748 return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
18751 template <
typename Dispatch>
18757 VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
18764 template <
typename Dispatch>
18770 return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
18773 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18774 template <
typename Dispatch>
18776 PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch
const & d )
const
18781 VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
18787 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18788 template <
typename Dispatch>
18790 PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch
const & d )
const
18795 VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
18799 UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *
this, d ) ) );
18805 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
18808 template <
typename Dispatch>
18815 return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
18816 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
18817 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
18818 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
18821 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18822 template <
typename Dispatch>
18824 Instance::createDirectFBSurfaceEXT(
const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
18825 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18826 Dispatch
const & d )
const
18831 VkResult result = d.vkCreateDirectFBSurfaceEXT(
18833 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
18834 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18835 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
18841 # ifndef VULKAN_HPP_NO_SMART_HANDLE
18842 template <
typename Dispatch>
18844 Instance::createDirectFBSurfaceEXTUnique(
const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
18845 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
18846 Dispatch
const & d )
const
18851 VkResult result = d.vkCreateDirectFBSurfaceEXT(
18853 reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
18854 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18855 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
18859 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18860 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
18865 template <
typename Dispatch>
18871 return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
18874 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18875 template <
typename Dispatch>
18877 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
18881 VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
18890 template <
typename Dispatch>
18901 d.vkCmdTraceRaysKHR( m_commandBuffer,
18902 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
18903 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
18904 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
18905 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
18911 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18912 template <
typename Dispatch>
18924 d.vkCmdTraceRaysKHR( m_commandBuffer,
18925 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
18926 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
18927 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
18928 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
18935 template <
typename Dispatch>
18939 uint32_t createInfoCount,
18946 return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
18947 static_cast<VkDeferredOperationKHR>( deferredOperation ),
18948 static_cast<VkPipelineCache>( pipelineCache ),
18950 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
18951 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
18952 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
18955 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18956 template <
typename PipelineAllocator,
typename Dispatch>
18962 Dispatch
const & d )
const
18966 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size() );
18967 VkResult result = d.vkCreateRayTracingPipelinesKHR(
18969 static_cast<VkDeferredOperationKHR>( deferredOperation ),
18970 static_cast<VkPipelineCache>( pipelineCache ),
18971 createInfos.
size(),
18973 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
18974 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
18975 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
18985 template <
typename PipelineAllocator,
18994 PipelineAllocator & pipelineAllocator,
18995 Dispatch
const & d )
const
18999 std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>
pipelines( createInfos.
size(), pipelineAllocator );
19000 VkResult result = d.vkCreateRayTracingPipelinesKHR(
19002 static_cast<VkDeferredOperationKHR>( deferredOperation ),
19003 static_cast<VkPipelineCache>( pipelineCache ),
19004 createInfos.
size(),
19006 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19007 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
19008 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19018 template <
typename Dispatch>
19024 Dispatch
const & d )
const
19029 VkResult result = d.vkCreateRayTracingPipelinesKHR(
19031 static_cast<VkDeferredOperationKHR>( deferredOperation ),
19032 static_cast<VkPipelineCache>( pipelineCache ),
19034 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
19035 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19036 reinterpret_cast<VkPipeline *>( &pipeline ) );
19037 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19047 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19048 template <
typename Dispatch,
typename PipelineAllocator>
19055 Dispatch
const & d )
const
19059 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
19060 VkResult result = d.vkCreateRayTracingPipelinesKHR(
19062 static_cast<VkDeferredOperationKHR>( deferredOperation ),
19063 static_cast<VkPipelineCache>( pipelineCache ),
19064 createInfos.
size(),
19066 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19067 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
19068 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19074 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
19075 uniquePipelines.reserve( createInfos.
size() );
19077 for (
auto const & pipeline :
pipelines )
19085 template <
typename Dispatch,
19086 typename PipelineAllocator,
19088 typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>
::value,
int>
::type>
19095 PipelineAllocator & pipelineAllocator,
19096 Dispatch
const & d )
const
19100 std::vector<VULKAN_HPP_NAMESPACE::Pipeline>
pipelines( createInfos.
size() );
19101 VkResult result = d.vkCreateRayTracingPipelinesKHR(
19103 static_cast<VkDeferredOperationKHR>( deferredOperation ),
19104 static_cast<VkPipelineCache>( pipelineCache ),
19105 createInfos.
size(),
19107 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19108 reinterpret_cast<VkPipeline *>(
pipelines.data() ) );
19109 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19115 std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
19116 uniquePipelines.reserve( createInfos.
size() );
19118 for (
auto const & pipeline :
pipelines )
19126 template <
typename Dispatch>
19132 Dispatch
const & d )
const
19137 VkResult result = d.vkCreateRayTracingPipelinesKHR(
19139 static_cast<VkDeferredOperationKHR>( deferredOperation ),
19140 static_cast<VkPipelineCache>( pipelineCache ),
19142 reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
19143 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19144 reinterpret_cast<VkPipeline *>( &pipeline ) );
19145 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19159 template <
typename Dispatch>
19161 uint32_t firstGroup,
19162 uint32_t groupCount,
19168 return static_cast<Result>(
19169 d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
19172 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19173 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
19180 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
19181 VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
19182 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() *
sizeof(
DataType ), reinterpret_cast<void *>( data.data() ) );
19188 template <
typename DataType,
typename Dispatch>
19195 VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
19196 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount,
sizeof(
DataType ), reinterpret_cast<void *>( &data ) );
19203 template <
typename Dispatch>
19205 uint32_t firstGroup,
19206 uint32_t groupCount,
19212 return static_cast<Result>(
19213 d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
19216 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19217 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
19225 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
19226 VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
19227 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() *
sizeof(
DataType ), reinterpret_cast<void *>( data.data() ) );
19233 template <
typename DataType,
typename Dispatch>
19240 VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
19241 m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount,
sizeof(
DataType ), reinterpret_cast<void *>( &data ) );
19248 template <
typename Dispatch>
19257 d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
19258 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
19259 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
19260 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
19261 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
19262 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
19265 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19266 template <
typename Dispatch>
19276 d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
19277 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
19278 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
19279 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
19280 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
19281 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
19285 template <
typename Dispatch>
19293 d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
19296 template <
typename Dispatch>
19300 d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
19305 template <
typename Dispatch>
19308 uint32_t vertexAttributeDescriptionCount,
19313 d.vkCmdSetVertexInputEXT( m_commandBuffer,
19314 vertexBindingDescriptionCount,
19315 reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
19316 vertexAttributeDescriptionCount,
19317 reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
19320 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19321 template <
typename Dispatch>
19329 d.vkCmdSetVertexInputEXT( m_commandBuffer,
19330 vertexBindingDescriptions.size(),
19332 vertexAttributeDescriptions.size(),
19337 #if defined( VK_USE_PLATFORM_FUCHSIA )
19340 template <
typename Dispatch>
19342 Device::getMemoryZirconHandleFUCHSIA(
const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
19343 zx_handle_t * pZirconHandle,
19347 return static_cast<Result>(
19348 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
19351 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19352 template <
typename Dispatch>
19354 Device::getMemoryZirconHandleFUCHSIA(
const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch
const & d )
const
19358 zx_handle_t zirconHandle;
19360 d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
19367 template <
typename Dispatch>
19370 zx_handle_t zirconHandle,
19371 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
19375 return static_cast<Result>(
19376 d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
19377 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
19379 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
19382 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19383 template <
typename Dispatch>
19386 zx_handle_t zirconHandle,
19387 Dispatch
const & d )
const
19391 VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
19392 VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
19393 static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
19395 reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
19398 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
19403 #if defined( VK_USE_PLATFORM_FUCHSIA )
19406 template <
typename Dispatch>
19408 const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
19411 return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
19412 m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
19415 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19416 template <
typename Dispatch>
19418 Device::importSemaphoreZirconHandleFUCHSIA(
const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
19419 Dispatch
const & d )
const
19423 VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA(
19424 m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
19431 template <
typename Dispatch>
19433 Device::getSemaphoreZirconHandleFUCHSIA(
const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
19434 zx_handle_t * pZirconHandle,
19438 return static_cast<Result>(
19439 d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
19442 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19443 template <
typename Dispatch>
19445 Device::getSemaphoreZirconHandleFUCHSIA(
const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch
const & d )
const
19449 zx_handle_t zirconHandle;
19451 d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
19459 #if defined( VK_USE_PLATFORM_FUCHSIA )
19462 template <
typename Dispatch>
19464 Device::createBufferCollectionFUCHSIA(
const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
19466 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
19470 return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
19471 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
19472 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19473 reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
19476 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19477 template <
typename Dispatch>
19479 Device::createBufferCollectionFUCHSIA(
const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
19480 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19481 Dispatch
const & d )
const
19485 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
19486 VkResult result = d.vkCreateBufferCollectionFUCHSIA(
19488 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
19489 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19490 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
19496 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19497 template <
typename Dispatch>
19499 Device::createBufferCollectionFUCHSIAUnique(
const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
19500 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19501 Dispatch
const & d )
const
19505 VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
19506 VkResult result = d.vkCreateBufferCollectionFUCHSIA(
19508 reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
19509 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19510 reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
19514 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19515 UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *
this, allocator, d ) ) );
19520 template <
typename Dispatch>
19522 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19523 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
19527 return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
19528 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
19531 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19532 template <
typename Dispatch>
19534 Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19535 const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
19536 Dispatch
const & d )
const
19540 VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA(
19541 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
19548 template <
typename Dispatch>
19550 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19551 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
19555 return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
19556 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
19559 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19560 template <
typename Dispatch>
19562 Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19563 const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
19564 Dispatch
const & d )
const
19568 VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
19569 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
19576 template <
typename Dispatch>
19577 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19582 d.vkDestroyBufferCollectionFUCHSIA(
19583 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
19586 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19587 template <
typename Dispatch>
19588 VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19589 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19594 d.vkDestroyBufferCollectionFUCHSIA(
19596 static_cast<VkBufferCollectionFUCHSIA>( collection ),
19597 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
19601 template <
typename Dispatch>
19607 d.vkDestroyBufferCollectionFUCHSIA(
19608 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
19611 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19612 template <
typename Dispatch>
19614 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19619 d.vkDestroyBufferCollectionFUCHSIA(
19621 static_cast<VkBufferCollectionFUCHSIA>( collection ),
19622 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
19626 template <
typename Dispatch>
19628 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
19629 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
19633 return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
19634 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
19637 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19638 template <
typename Dispatch>
19640 Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch
const & d )
const
19644 VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
19645 VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA(
19646 m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
19656 template <
typename Dispatch>
19662 return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
19663 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
19666 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19667 template <
typename Dispatch>
19674 VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
19675 m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
19676 resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19684 template <
typename Dispatch>
19688 d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
19693 template <
typename Dispatch>
19699 d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
19704 template <
typename Dispatch>
19711 return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
19712 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
19715 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19716 template <
typename Dispatch>
19723 VkResult result = d.vkGetMemoryRemoteAddressNV(
19724 m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
19733 template <
typename Dispatch>
19739 return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
19740 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
19743 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19744 template <
typename Dispatch>
19751 VkResult result = d.vkGetPipelinePropertiesEXT(
19752 m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
19755 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
19761 template <
typename Dispatch>
19765 d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
19768 template <
typename Dispatch>
19773 d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
19776 template <
typename Dispatch>
19780 d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
19783 template <
typename Dispatch>
19787 d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
19790 template <
typename Dispatch>
19795 d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
19798 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
19801 template <
typename Dispatch>
19808 return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
19809 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
19810 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19811 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
19814 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19815 template <
typename Dispatch>
19817 Instance::createScreenSurfaceQNX(
const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
19818 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19819 Dispatch
const & d )
const
19824 VkResult result = d.vkCreateScreenSurfaceQNX(
19826 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
19827 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19828 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
19834 # ifndef VULKAN_HPP_NO_SMART_HANDLE
19835 template <
typename Dispatch>
19837 Instance::createScreenSurfaceQNXUnique(
const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
19838 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
19839 Dispatch
const & d )
const
19844 VkResult result = d.vkCreateScreenSurfaceQNX(
19846 reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
19847 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
19848 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
19852 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
19853 UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *
this, allocator, d ) ) );
19858 template <
typename Dispatch>
19860 struct _screen_window * window,
19864 return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
19867 # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19868 template <
typename Dispatch>
19870 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
struct _screen_window & window, Dispatch
const & d )
const VULKAN_HPP_NOEXCEPT
19874 VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
19883 template <
typename Dispatch>
19889 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
19892 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19893 template <
typename Dispatch>
19899 d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(),
reinterpret_cast<const VkBool32 *
>( colorWriteEnables.data() ) );
19905 template <
typename Dispatch>
19910 d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
19915 template <
typename Dispatch>
19918 uint32_t instanceCount,
19919 uint32_t firstInstance,
19924 d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance,
stride );
19927 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19928 template <
typename Dispatch>
19930 uint32_t instanceCount,
19931 uint32_t firstInstance,
19936 d.vkCmdDrawMultiEXT( m_commandBuffer,
19941 vertexInfo.stride() );
19945 template <
typename Dispatch>
19948 uint32_t instanceCount,
19949 uint32_t firstInstance,
19951 const int32_t * pVertexOffset,
19955 d.vkCmdDrawMultiIndexedEXT(
19956 m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance,
stride, pVertexOffset );
19959 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19960 template <
typename Dispatch>
19963 uint32_t instanceCount,
19964 uint32_t firstInstance,
19970 d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
19975 indexInfo.stride(),
19976 static_cast<const int32_t *
>( vertexOffset ) );
19982 template <
typename Dispatch>
19989 return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
19990 reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
19991 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
19992 reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
19995 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19996 template <
typename Dispatch>
20000 Dispatch
const & d )
const
20006 d.vkCreateMicromapEXT( m_device,
20007 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
20008 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
20009 reinterpret_cast<VkMicromapEXT *>( µmap ) );
20015 # ifndef VULKAN_HPP_NO_SMART_HANDLE
20016 template <
typename Dispatch>
20020 Dispatch
const & d )
const
20026 d.vkCreateMicromapEXT( m_device,
20027 reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
20028 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
20029 reinterpret_cast<VkMicromapEXT *>( µmap ) );
20033 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20039 template <
typename Dispatch>
20045 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20048 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20049 template <
typename Dispatch>
20056 d.vkDestroyMicromapEXT( m_device,
20057 static_cast<VkMicromapEXT>( micromap ),
20058 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20062 template <
typename Dispatch>
20068 d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
20071 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20072 template <
typename Dispatch>
20079 d.vkDestroyMicromapEXT( m_device,
20080 static_cast<VkMicromapEXT>( micromap ),
20081 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
20085 template <
typename Dispatch>
20091 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
20094 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20095 template <
typename Dispatch>
20101 d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(),
reinterpret_cast<const VkMicromapBuildInfoEXT *
>( infos.data() ) );
20105 template <
typename Dispatch>
20107 uint32_t infoCount,
20112 return static_cast<Result>( d.vkBuildMicromapsEXT(
20113 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
20116 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20117 template <
typename Dispatch>
20121 Dispatch
const & d )
const
20125 VkResult result = d.vkBuildMicromapsEXT(
20126 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.
size(),
reinterpret_cast<const VkMicromapBuildInfoEXT *
>( infos.
data() ) );
20128 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20136 template <
typename Dispatch>
20142 return static_cast<Result>(
20143 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
20146 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20147 template <
typename Dispatch>
20150 Dispatch
const & d )
const
20155 d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
20157 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20165 template <
typename Dispatch>
20171 return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
20172 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
20175 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20176 template <
typename Dispatch>
20182 VkResult result = d.vkCopyMicromapToMemoryEXT(
20183 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
20185 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20193 template <
typename Dispatch>
20199 return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
20200 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
20203 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20204 template <
typename Dispatch>
20210 VkResult result = d.vkCopyMemoryToMicromapEXT(
20211 m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
20213 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
20221 template <
typename Dispatch>
20231 return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
20232 m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData,
stride ) );
20235 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20236 template <
typename DataType,
typename DataTypeAllocator,
typename Dispatch>
20242 Dispatch
const & d )
const
20247 std::vector<DataType, DataTypeAllocator>
data( dataSize /
sizeof(
DataType ) );
20248 VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
20250 reinterpret_cast<const VkMicromapEXT *
>( micromaps.
data() ),
20251 static_cast<VkQueryType>( queryType ),
20253 reinterpret_cast<void *>( data.data() ),
20260 template <
typename DataType,
typename Dispatch>
20265 Dispatch
const & d )
const
20270 VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
20272 reinterpret_cast<const VkMicromapEXT *
>( micromaps.
data() ),
20273 static_cast<VkQueryType>( queryType ),
20275 reinterpret_cast<void *>( &data ),
20283 template <
typename Dispatch>
20287 d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
20290 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20291 template <
typename Dispatch>
20296 d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
20300 template <
typename Dispatch>
20305 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
20308 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20309 template <
typename Dispatch>
20315 d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
20319 template <
typename Dispatch>
20324 d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
20327 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20328 template <
typename Dispatch>
20334 d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
20338 template <
typename Dispatch>
20343 uint32_t firstQuery,
20347 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
20349 reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
20350 static_cast<VkQueryType>( queryType ),
20351 static_cast<VkQueryPool>( queryPool ),
20355 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20356 template <
typename Dispatch>
20361 uint32_t firstQuery,
20366 d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
20368 reinterpret_cast<const VkMicromapEXT *
>( micromaps.data() ),
20369 static_cast<VkQueryType>( queryType ),
20370 static_cast<VkQueryPool
>( queryPool ),
20375 template <
typename Dispatch>
20381 d.vkGetDeviceMicromapCompatibilityEXT( m_device,
20382 reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
20383 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
20386 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20387 template <
typename Dispatch>
20394 d.vkGetDeviceMicromapCompatibilityEXT( m_device,
20395 reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
20396 reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
20398 return compatibility;
20402 template <
typename Dispatch>
20409 d.vkGetMicromapBuildSizesEXT( m_device,
20410 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
20411 reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
20412 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
20415 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20416 template <
typename Dispatch>
20425 d.vkGetMicromapBuildSizesEXT( m_device,
20426 static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
20427 reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
20428 reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
20436 template <
typename Dispatch>
20440 d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
20445 template <
typename Dispatch>
20451 d.vkGetDeviceBufferMemoryRequirementsKHR(
20452 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
20455 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20456 template <
typename Dispatch>
20463 d.vkGetDeviceBufferMemoryRequirementsKHR(
20464 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20466 return memoryRequirements;
20469 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
20477 d.vkGetDeviceBufferMemoryRequirementsKHR(
20478 m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20480 return structureChain;
20484 template <
typename Dispatch>
20490 d.vkGetDeviceImageMemoryRequirementsKHR(
20491 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
20494 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20495 template <
typename Dispatch>
20502 d.vkGetDeviceImageMemoryRequirementsKHR(
20503 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20505 return memoryRequirements;
20508 template <
typename X,
typename Y,
typename...
Z,
typename Dispatch>
20516 d.vkGetDeviceImageMemoryRequirementsKHR(
20517 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
20519 return structureChain;
20523 template <
typename Dispatch>
20525 uint32_t * pSparseMemoryRequirementCount,
20530 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
20531 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
20532 pSparseMemoryRequirementCount,
20533 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
20536 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20537 template <
typename SparseImageMemoryRequirements2Allocator,
typename Dispatch>
20543 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
20544 uint32_t sparseMemoryRequirementCount;
20545 d.vkGetDeviceImageSparseMemoryRequirementsKHR(
20546 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount,
nullptr );
20547 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20548 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
20549 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
20550 &sparseMemoryRequirementCount,
20551 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
20553 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
20554 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
20556 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20558 return sparseMemoryRequirements;
20561 template <
typename SparseImageMemoryRequirements2Allocator,
20567 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
20568 Dispatch
const & d )
const
20572 std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
20573 sparseImageMemoryRequirements2Allocator );
20574 uint32_t sparseMemoryRequirementCount;
20575 d.vkGetDeviceImageSparseMemoryRequirementsKHR(
20576 m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount,
nullptr );
20577 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20578 d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
20579 reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
20580 &sparseMemoryRequirementCount,
20581 reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
20583 VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
20584 if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
20586 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
20588 return sparseMemoryRequirements;
20594 template <
typename Dispatch>
20600 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
20601 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
20602 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
20605 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20606 template <
typename Dispatch>
20614 d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
20615 reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
20616 reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
20618 return hostMapping;
20622 template <
typename Dispatch>
20627 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
20630 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20631 template <
typename Dispatch>
20638 d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
20646 template <
typename Dispatch>
20648 uint32_t copyCount,
20653 d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount,
stride );
20656 template <
typename Dispatch>
20658 uint32_t copyCount,
20666 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
20667 static_cast<VkDeviceAddress>( copyBufferAddress ),
20670 static_cast<VkImage>( dstImage ),
20671 static_cast<VkImageLayout>( dstImageLayout ),
20672 reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) );
20675 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20676 template <
typename Dispatch>
20687 d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
20688 static_cast<VkDeviceAddress>( copyBufferAddress ),
20689 imageSubresources.size(),
20691 static_cast<VkImage
>( dstImage ),
20692 static_cast<VkImageLayout>( dstImageLayout ),
20699 template <
typename Dispatch>
20705 d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
20708 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20709 template <
typename Dispatch>
20716 d.vkCmdDecompressMemoryNV(
20717 m_commandBuffer, decompressMemoryRegions.size(),
reinterpret_cast<const VkDecompressMemoryRegionNV *
>( decompressMemoryRegions.data() ) );
20721 template <
typename Dispatch>
20728 d.vkCmdDecompressMemoryIndirectCountNV(
20729 m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ),
stride );
20734 template <
typename Dispatch>
20739 d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
20742 template <
typename Dispatch>
20746 d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
20749 template <
typename Dispatch>
20753 d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
20756 template <
typename Dispatch>
20761 d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
20764 template <
typename Dispatch>
20770 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>(
samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
20773 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20774 template <
typename Dispatch>
20781 d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>(
samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
20785 template <
typename Dispatch>
20790 d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
20793 template <
typename Dispatch>
20797 d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
20800 template <
typename Dispatch>
20804 d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
20807 template <
typename Dispatch>
20809 uint32_t attachmentCount,
20814 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
20817 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20818 template <
typename Dispatch>
20825 d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(),
reinterpret_cast<const VkBool32 *
>( colorBlendEnables.data() ) );
20829 template <
typename Dispatch>
20831 uint32_t attachmentCount,
20836 d.vkCmdSetColorBlendEquationEXT(
20837 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
20840 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20841 template <
typename Dispatch>
20849 d.vkCmdSetColorBlendEquationEXT(
20850 m_commandBuffer, firstAttachment, colorBlendEquations.size(),
reinterpret_cast<const VkColorBlendEquationEXT *
>( colorBlendEquations.data() ) );
20854 template <
typename Dispatch>
20856 uint32_t attachmentCount,
20861 d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
20864 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20865 template <
typename Dispatch>
20873 d.vkCmdSetColorWriteMaskEXT(
20874 m_commandBuffer, firstAttachment, colorWriteMasks.size(),
reinterpret_cast<const VkColorComponentFlags *
>( colorWriteMasks.data() ) );
20878 template <
typename Dispatch>
20882 d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
20885 template <
typename Dispatch>
20891 d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
20894 template <
typename Dispatch>
20899 d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
20902 template <
typename Dispatch>
20906 d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
20909 template <
typename Dispatch>
20914 d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
20917 template <
typename Dispatch>
20919 uint32_t attachmentCount,
20924 d.vkCmdSetColorBlendAdvancedEXT(
20925 m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
20928 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20929 template <
typename Dispatch>
20937 d.vkCmdSetColorBlendAdvancedEXT(
20938 m_commandBuffer, firstAttachment, colorBlendAdvanced.size(),
reinterpret_cast<const VkColorBlendAdvancedEXT *
>( colorBlendAdvanced.data() ) );
20942 template <
typename Dispatch>
20947 d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
20950 template <
typename Dispatch>
20955 d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
20958 template <
typename Dispatch>
20962 d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
20965 template <
typename Dispatch>
20970 d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
20973 template <
typename Dispatch>
20978 d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
20981 template <
typename Dispatch>
20983 uint32_t viewportCount,
20988 d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
20991 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20992 template <
typename Dispatch>
21000 d.vkCmdSetViewportSwizzleNV(
21001 m_commandBuffer, firstViewport, viewportSwizzles.size(),
reinterpret_cast<const VkViewportSwizzleNV *
>( viewportSwizzles.data() ) );
21005 template <
typename Dispatch>
21010 d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
21013 template <
typename Dispatch>
21017 d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
21020 template <
typename Dispatch>
21025 d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
21028 template <
typename Dispatch>
21033 d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
21036 template <
typename Dispatch>
21038 const float * pCoverageModulationTable,
21042 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
21045 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21046 template <
typename Dispatch>
21052 d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
21056 template <
typename Dispatch>
21061 d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
21064 template <
typename Dispatch>
21069 d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
21072 template <
typename Dispatch>
21077 d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
21082 template <
typename Dispatch>
21088 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
21091 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21092 template <
typename Dispatch>
21099 d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
21105 template <
typename Dispatch>
21111 d.vkGetShaderModuleCreateInfoIdentifierEXT(
21112 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
21115 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21116 template <
typename Dispatch>
21124 d.vkGetShaderModuleCreateInfoIdentifierEXT(
21125 m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
21133 template <
typename Dispatch>
21136 uint32_t * pFormatCount,
21141 return static_cast<Result>(
21142 d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
21143 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
21145 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
21148 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21149 template <
typename OpticalFlowImageFormatPropertiesNVAllocator,
typename Dispatch>
21153 Dispatch
const & d )
const
21157 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
21158 uint32_t formatCount;
21162 result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
21163 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount,
nullptr );
21164 if ( ( result ==
VK_SUCCESS ) && formatCount )
21166 imageFormatProperties.resize( formatCount );
21167 result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
21168 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
21170 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
21175 if ( formatCount < imageFormatProperties.size() )
21177 imageFormatProperties.resize( formatCount );
21179 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
21182 template <
typename OpticalFlowImageFormatPropertiesNVAllocator,
21189 OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator,
21190 Dispatch
const & d )
const
21194 std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties(
21195 opticalFlowImageFormatPropertiesNVAllocator );
21196 uint32_t formatCount;
21200 result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
21201 m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount,
nullptr );
21202 if ( ( result ==
VK_SUCCESS ) && formatCount )
21204 imageFormatProperties.resize( formatCount );
21205 result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
21206 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
21208 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
21213 if ( formatCount < imageFormatProperties.size() )
21215 imageFormatProperties.resize( formatCount );
21217 return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
21221 template <
typename Dispatch>
21228 return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
21229 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
21230 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
21231 reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
21234 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21235 template <
typename Dispatch>
21239 Dispatch
const & d )
const
21244 VkResult result = d.vkCreateOpticalFlowSessionNV(
21246 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
21247 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21248 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
21254 # ifndef VULKAN_HPP_NO_SMART_HANDLE
21255 template <
typename Dispatch>
21259 Dispatch
const & d )
const
21264 VkResult result = d.vkCreateOpticalFlowSessionNV(
21266 reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
21267 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
21268 reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
21272 static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
21278 template <
typename Dispatch>
21284 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21287 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21288 template <
typename Dispatch>
21295 d.vkDestroyOpticalFlowSessionNV(
21297 static_cast<VkOpticalFlowSessionNV>( session ),
21298 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21302 template <
typename Dispatch>
21308 d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
21311 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21312 template <
typename Dispatch>
21319 d.vkDestroyOpticalFlowSessionNV(
21321 static_cast<VkOpticalFlowSessionNV>( session ),
21322 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
21326 #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
21327 template <
typename Dispatch>
21335 return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
21336 static_cast<VkOpticalFlowSessionNV>( session ),
21337 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
21338 static_cast<VkImageView>(
view ),
21339 static_cast<VkImageLayout>( layout ) ) );
21342 template <
typename Dispatch>
21348 Dispatch
const & d )
const
21352 VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device,
21353 static_cast<VkOpticalFlowSessionNV>( session ),
21354 static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
21355 static_cast<VkImageView>( view ),
21356 static_cast<VkImageLayout>( layout ) );
21363 template <
typename Dispatch>
21369 d.vkCmdOpticalFlowExecuteNV(
21370 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
21373 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21374 template <
typename Dispatch>
21381 d.vkCmdOpticalFlowExecuteNV(
21382 m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
21388 template <
typename Dispatch>
21390 uint32_t * pPropertiesCount,
21395 return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
21396 m_device, static_cast<VkFramebuffer>(
framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
21399 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21400 template <
typename TilePropertiesQCOMAllocator,
typename Dispatch>
21406 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
21407 uint32_t propertiesCount;
21411 result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount,
nullptr );
21412 if ( ( result ==
VK_SUCCESS ) && propertiesCount )
21414 properties.resize( propertiesCount );
21415 result = d.vkGetFramebufferTilePropertiesQCOM(
21416 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
21421 if ( propertiesCount < properties.size() )
21423 properties.resize( propertiesCount );
21428 template <
typename TilePropertiesQCOMAllocator,
21434 TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,
21435 Dispatch
const & d )
const
21439 std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
21440 uint32_t propertiesCount;
21444 result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount,
nullptr );
21445 if ( ( result ==
VK_SUCCESS ) && propertiesCount )
21447 properties.resize( propertiesCount );
21448 result = d.vkGetFramebufferTilePropertiesQCOM(
21449 m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
21454 if ( propertiesCount < properties.size() )
21456 properties.resize( propertiesCount );
21462 template <
typename Dispatch>
21468 return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
21469 m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
21472 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21473 template <
typename Dispatch>
21480 d.vkGetDynamicRenderingTilePropertiesQCOM(
21481 m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups(uint32_t *pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei GLenum GLsizei GLsizei GLuint memory
void dispatchBaseKHR(uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result importFenceFdKHR(const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR *pImportFenceFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t getMemoryOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint GLsizei const GLuint const GLintptr const GLsizeiptr * sizes
void writeAccelerationStructuresPropertiesNV(uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result buildMicromapsEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *pInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWScalingEnableNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getSparseImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t *pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR *pPresentModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint GLsizei const GLchar * message
void getImageMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectEXT(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *pMessenger, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPrimitiveTopologyEXT(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutHostMappingInfoVALVE(const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE *pBindingReference, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE *pHostMapping, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void releaseDisplayEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDisplayModeKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR *pMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueue(uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue *pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLineStippleEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR *pStatistics, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result registerEventEXT(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT *pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Fence *pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pushConstants(VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type setEvent(VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getBufferMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLogicOpEXT(VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getAccelerationStructureMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setMemoryPriorityEXT(VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void decompressMemoryNV(uint32_t decompressRegionCount, const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV *pDecompressMemoryRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pipelineBarrier(VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBounds(float minDepthBounds, float maxDepthBounds, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksEXT(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR(uint32_t planeIndex, uint32_t *pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR *pDisplays, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void traceRaysNV(VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthCompareOp(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindDescriptorSets(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyBufferView(VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setExclusiveScissorNV(uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pExclusiveScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NOEXCEPT
void writeBufferMarker2AMD(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPrimitiveTopology(VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type getRayTracingCaptureReplayShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Sampler, Dispatch > >::type createSamplerUnique(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::QueryPool, Dispatch > >::type createQueryPoolUnique(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *pMemoryFdProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMemoryProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties *pMemoryProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySampler(VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Fence, Dispatch > >::type registerDisplayEventEXTUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT &displayEventInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t *pCounterValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getDeferredOperationResultKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
void setCoverageModulationTableNV(uint32_t coverageModulationTableCount, const float *pCoverageModulationTable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo *pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createMicromapEXT(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::MicromapEXT *pMicromap, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMultiEXT(uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT *pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeMicromapsPropertiesEXT(uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT *pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createPrivateDataSlot(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot *pPrivateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch >, DescriptorSetAllocator > >::type allocateDescriptorSetsUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo &allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::BufferView, Dispatch > >::type createBufferViewUnique(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type getRayTracingShaderGroupHandleKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *pYcbcrConversion, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getBufferOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSubresourceLayout2EXT(VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT *pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT *pLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void dispatch(uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setProvokingVertexModeEXT(VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewport(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport *pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type setPrivateData(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void resolveImage2(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 *pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void subpassShadingHUAWEI(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void blitImage2KHR(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 *pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result allocateDescriptorSets(const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo *pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet *pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setHdrMetadataEXT(uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT *pMetadata, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT(uint32_t micromapCount, const VULKAN_HPP_NAMESPACE::MicromapEXT *pMicromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void *pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyAccelerationStructureToMemoryKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus(VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges(uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange *pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginTransformFeedbackEXT(uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer *pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL(VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL *pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createFramebuffer(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer *pFramebuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginConditionalRenderingEXT(const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT *pConditionalRenderingBegin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindDescriptorBufferEmbeddedSamplersEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch > >::type createAccelerationStructureKHRUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void endRenderPass(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetHostMappingVALVE(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void **ppData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result begin(const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo *pBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endRenderPass2KHR(const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch > >::type createDebugUtilsMessengerEXTUnique(const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
PFN_vkVoidFunction getProcAddr(const char *pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquireNextImage2KHR(const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateDescriptorSets(uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet *pDescriptorCopies, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDescriptorPool(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool *pDescriptorPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLchar *const * string
VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV *pIndirectCommandsLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLfloat * value
void getDescriptorSetLayoutSupport(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo *pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch > >::type createSamplerYcbcrConversionKHRUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result waitSemaphores(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo *pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMultiIndexedEXT(uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT *pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t *pVertexOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result submit(uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo *pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorBlendEnableEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 *pColorBlendEnables, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPolygonModeEXT(VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetEvent2KHR(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceAddress getBufferAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyFramebuffer(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setVertexInputEXT(uint32_t vertexBindingDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT *pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT *pVertexAttributeDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportShadingRatePaletteNV(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV *pShadingRatePalettes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR *pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueueFamilyPerformanceQueryPassesKHR(const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR *pPerformanceQueryCreateInfo, uint32_t *pNumPasses, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorUpdateTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch > >::type createSharedSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
T const * data() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch > >::type createDisplayPlaneSurfaceKHRUnique(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getFeatures2() const VULKAN_HPP_NOEXCEPT
void copyQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result resetFences(uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence *pFences, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLuint64 GLenum handleType
void copyBuffer2(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 *pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void clearColorImage(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue *pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRasterizerDiscardEnable(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void opticalFlowExecuteNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV *pExecuteInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSubresourceLayout(VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource *pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout *pLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindVertexBuffers2EXT(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize *pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize *pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT *pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRasterizerDiscardEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion(uint32_t *pApiVersion, Dispatch const &d) VULKAN_HPP_NOEXCEPT
void copyMemoryIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetEvent(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setEvent2KHR(VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalBufferPropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties *pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkFlags VkColorComponentFlags
void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch > >::type createCuFunctionNVXUnique(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type bindBufferMemory(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool
void setViewportSwizzleNV(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *pViewportSwizzles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyPrivateDataSlotEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setFragmentShadingRateEnumNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateDescriptorSetWithTemplate(VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDevice(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Device *pDevice, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFormatProperties2(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 *pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageFormatProperties2(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch > >::type allocateMemoryUnique(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo &allocateInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getShaderModuleIdentifierEXT(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT *pIdentifier, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getToolProperties(uint32_t *pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties *pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Result getDynamicRenderingTilePropertiesQCOM(const VULKAN_HPP_NAMESPACE::RenderingInfo *pRenderingInfo, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void blitImage(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit *pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch > >::type createDescriptorSetLayoutUnique(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void beginQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindIndexBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindVertexBuffers(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint32_t descriptorSetCount
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch > >::type createPipelineCacheUnique(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
GLenum GLenum GLsizei void * image
void endConditionalRenderingEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getCheckpointDataNV(uint32_t *pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV *pCheckpointData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch > >::type createDescriptorUpdateTemplateUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setPrimitiveRestartEnable(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWScalingNV(uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV *pViewportWScalings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBoundsTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getGeneratedCommandsMemoryRequirementsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkFlags VkPipelineStageFlags
**But if you need a result
VULKAN_HPP_NODISCARD Result getMemoryFdKHR(const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR *pGetFdInfo, int *pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorBlendAdvancedEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT *pColorBlendAdvanced, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
AccelerationStructureBuildTypeKHR
void setPrimitiveRestartEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksNV(uint32_t taskCount, uint32_t firstTask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t *pRectCount, VULKAN_HPP_NAMESPACE::Rect2D *pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createGraphicsPipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT(uint32_t *pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT *pTimeDomains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquireNextImageKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t *pImageIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMemoryCommitment(VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize *pCommittedMemoryInBytes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR(const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR *pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR *pCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void debugMarkerInsertEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFormatProperties(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties *pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR *pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getGroupPeerMemoryFeatures(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags *pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch > >::type createPrivateDataSlotUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createBuffer(const VULKAN_HPP_NAMESPACE::BufferCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Buffer *pBuffer, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL(const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL *pOverrideInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void debugMarkerEndEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void traceRaysIndirect2KHR(VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCoverageModulationModeNV(VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildAccelerationStructuresKHR(uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void dispatchBase(uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeTimestamp(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch > >::type createIndirectCommandsLayoutNVUnique(const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL *pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL *pConfiguration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBiasEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyShaderModule(VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelineCacheData(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t *pDataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyPipeline(VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::RenderPass, Dispatch > >::type createRenderPass2KHRUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
CopyAccelerationStructureModeKHR
VULKAN_HPP_NODISCARD Result submit2(uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 *pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t getBufferOpaqueCaptureAddressKHR(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyMicromapEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR *pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endRenderPass2(const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getProperties2KHR() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus(VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
GLuint GLsizei const GLuint const GLintptr * offsets
void copyBufferToImage2(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 *pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void cuLaunchKernelNVX(const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX *pLaunchInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalSemaphorePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createComputePipelinesUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT(uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT *pTimestampInfos, uint64_t *pTimestamps, uint64_t *pMaxDeviation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void blitImage2(const VULKAN_HPP_NAMESPACE::BlitImageInfo2 *pBlitImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE *pDisplayTimingProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBufferToImage(VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRasterizationSamplesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch > >::type getDrmDisplayEXTUnique(int32_t drmFd, uint32_t connectorId, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getGroupPeerMemoryFeaturesKHR(uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags *pPeerMemoryFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceAddress getBufferAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilCompareMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch > >::type createSamplerYcbcrConversionUnique(const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type getRayTracingShaderGroupHandleNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getBufferMemoryRequirements(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageViewOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Buffer, Dispatch > >::type createBufferUnique(const VULKAN_HPP_NAMESPACE::BufferCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
SurfaceCounterFlagBitsEXT
GLint GLsizei GLsizei height
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createGraphicsPipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR(const VULKAN_HPP_NAMESPACE::PipelineInfoKHR *pPipelineInfo, uint32_t *pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyCuModuleNVX(VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectCountEXT(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindTransformFeedbackBuffersEXT(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize *pSizes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindSparse(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo *pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endTransformFeedbackEXT(uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer *pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pCounterBufferOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch > >::type createDeferredOperationKHRUnique(Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
AccelerationStructureCompatibilityKHR
void setCoverageReductionModeNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSampler(const VULKAN_HPP_NAMESPACE::SamplerCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Sampler *pSampler, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createCuModuleNVX(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX *pModule, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setAlphaToOneEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void waitEvents2KHR(uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event *pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::RenderPass *pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySemaphore(VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createPipelineLayout(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout *pPipelineLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLocalDimmingAMD(VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT(const VULKAN_HPP_NAMESPACE::PipelineInfoEXT *pPipelineInfo, VULKAN_HPP_NAMESPACE::BaseOutStructure *pPipelineProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ExternalMemoryHandleTypeFlagBits
void pushDescriptorSetKHR(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet *pDescriptorWrites, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthClipEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMicromapBuildSizesEXT(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *pBuildInfo, VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT *pSizeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getSparseImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void unmapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getShaderModuleCreateInfoIdentifierEXT(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo *pCreateInfo, VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT *pIdentifier, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutBindingOffsetEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, VULKAN_HPP_NAMESPACE::DeviceSize *pOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint32_t commandBufferCount
#define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
void destroyDeferredOperationKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV *pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT *pCallback, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSubpassShadingMaxWorkgroupSizeHUAWEI(VULKAN_HPP_NAMESPACE::RenderPass renderpass, VULKAN_HPP_NAMESPACE::Extent2D *pMaxWorkgroupSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices(uint32_t *pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice *pPhysicalDevices, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getRenderAreaGranularity(VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D *pGranularity, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void() free(VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyBuffer(VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT *pMultisampleProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilReference(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setEvent(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setScissorWithCount(uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySamplerYcbcrConversionKHR(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindImageMemory2(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalBufferProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties *pExternalBufferProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getToolPropertiesEXT(uint32_t *pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties *pToolProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector< StructureChain > getQueueFamilyProperties2KHR() const
void beginRenderingKHR(const VULKAN_HPP_NAMESPACE::RenderingInfo *pRenderingInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLineWidth(float lineWidth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NAMESPACE
void destroyPipelineCache(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void trimCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDebugReportCallbackEXT(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDiscardRectangleEXT(uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D *pDiscardRectangles, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch > >::type createDebugReportCallbackEXTUnique(const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyMicromapToMemoryEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthCompareOpEXT(VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void(VKAPI_PTR * PFN_vkVoidFunction)(void)
void getExternalFenceProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties *pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLbitfield GLuint64 timeout
DeviceAddress getBufferAddressEXT(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t *pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR *pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result mapMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void **ppData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Semaphore, Dispatch > >::type createSemaphoreUnique(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void bindDescriptorBuffersEXT(uint32_t bufferCount, const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT *pBindingInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImage2(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 *pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorWriteMaskEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorComponentFlags *pColorWriteMasks, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyPipelineLayout(VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint32_t size() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createRayTracingPipelineKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::RenderPass, Dispatch > >::type createRenderPass2Unique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setFragmentShadingRateKHR(const VULKAN_HPP_NAMESPACE::Extent2D *pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorSetLayout(VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLineStippleEXT(uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result signalSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo *pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createImageView(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::ImageView *pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createGraphicsPipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result displayPowerControlEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT *pDisplayPowerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< DataType > getQueryPoolResult(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getFaultInfoEXT(VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT *pFaultCounts, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT *pFaultInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< VULKAN_HPP_NAMESPACE::SwapchainKHR >::type createSharedSwapchainKHR(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createDescriptorSetLayout(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout *pSetLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLint GLsizei GLint GLenum format
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch > >::type createDescriptorUpdateTemplateKHRUnique(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type reset(VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch >, SwapchainKHRAllocator > >::type createSharedSwapchainsKHRUnique(VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
uint32_t getDeferredOperationMaxConcurrencyKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLenum GLboolean GLsizei stride
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createComputePipelineUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch > >::type createOpticalFlowSessionNVUnique(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void freeMemory(VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWithCountEXT(uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport *pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getAccelerationStructureCompatibilityKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR *pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *pCompatibility, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLenum * attachments
uint32_t getImageViewHandleNVX(const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void executeCommands(uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_INLINE void resultCheck(Result result, char const *message)
void setRasterizationStreamEXT(uint32_t rasterizationStream, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetQueryPool(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Fence, Dispatch > >::type registerEventEXTUnique(const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT &deviceEventInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBoundsTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createRayTracingPipelineNV(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getBufferMemoryRequirements2(const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceAddress getAccelerationStructureAddressKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result waitSemaphoresKHR(const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo *pWaitInfo, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type end(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t *pDataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createAccelerationStructureNV(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV *pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resolveImage2KHR(const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 *pResolveImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeTimestamp2KHR(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
*get result *(waiting if necessary)*A common idiom is to fire a bunch of sub tasks at the queue
void drawIndirectCountKHR(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_ASSERT
void endRendering(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void updateBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyPrivateDataSlot(VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void submitDebugUtilsMessageEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT *pCallbackData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createGraphicsPipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getDescriptorEXT(const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT *pDescriptorInfo, size_t dataSize, void *pDescriptor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetQueryPoolEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM(VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, uint32_t *pPropertiesCount, VULKAN_HPP_NAMESPACE::TilePropertiesQCOM *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRayTracingPipelineStackSizeKHR(uint32_t pipelineStackSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch > >::type createPrivateDataSlotEXTUnique(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR(uint32_t *pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR *pFragmentShadingRates, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NAMESPACE::CommandPool commandPool
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createRayTracingPipelineKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyImageToBuffer2(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 *pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void nextSubpass(VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type waitIdle(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void waitEvents2(uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event *pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getShaderInfoAMD(VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t *pInfoSize, void *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalSemaphoreProperties(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *pExternalSemaphoreProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR *pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilOpEXT(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::RenderPass *pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NODISCARD
GLuint const GLchar * name
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type release(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void debugMarkerBeginEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< std::pair< uint64_t, uint64_t > >::type getCalibratedTimestampEXT(const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ×tampInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getAccelerationStructureOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch > > createRayTracingPipelineNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch > >::type createDescriptorPoolUnique(const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result mergePipelineCaches(VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache *pSrcCaches, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyOpticalFlowSessionNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFormatProperties2KHR(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 *pFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setFrontFaceEXT(VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type resetEvent(VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void bindInvocationMaskHUAWEI(VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges(uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange *pMemoryRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch > >::type createAccelerationStructureNVUnique(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void beginRenderPass2KHR(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo *pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch > >::type createFramebufferUnique(const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void resolveImage(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySamplerYcbcrConversion(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result allocateCommandBuffers(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo *pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pipelineBarrier2(const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties *pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createPipelineCache(const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache *pPipelineCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void preprocessGeneratedCommandsNV(const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV *pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyImageView(VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLint GLsizei GLsizei GLsizei depth
VULKAN_HPP_NODISCARD ResultValueType< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch >, CommandBufferAllocator > >::type allocateCommandBuffersUnique(const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo &allocateInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void executeGeneratedCommandsNV(VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV *pGeneratedCommandsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImage2KHR(const VULKAN_HPP_NAMESPACE::CopyImageInfo2 *pCopyImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getBufferMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT *pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue(VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t *pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDeviceMaskKHR(uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch > >::type acquirePerformanceConfigurationINTELUnique(const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL &acquireInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setExtraPrimitiveOverestimationSizeEXT(float extraPrimitiveOverestimationSize, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthWriteEnable(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBufferToImage2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 *pCopyBufferToImageInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result presentKHR(const VULKAN_HPP_NAMESPACE::PresentInfoKHR *pPresentInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setStencilOp(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endRenderingKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createQueryPool(const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::QueryPool *pQueryPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Fence, Dispatch > >::type createFenceUnique(const VULKAN_HPP_NAMESPACE::FenceCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void traceRaysKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthClampEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyMemoryToMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildAccelerationStructuresIndirectKHR(uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress *pIndirectDeviceAddresses, const uint32_t *pIndirectStrides, const uint32_t *const *ppMaxPrimitiveCounts, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image *pSwapchainImages, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ConservativeRasterizationModeEXT
void setStencilTestEnable(VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t *pValue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCoverageToColorEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type writeAccelerationStructuresPropertyKHR(VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR > const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindShadingRateImageNV(VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR(const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR *pExecutableInfo, uint32_t *pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR *pInternalRepresentations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginRendering(const VULKAN_HPP_NAMESPACE::RenderingInfo *pRenderingInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void fillBuffer(VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createBufferView(const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::BufferView *pView, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthWriteEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getMicromapCompatibilityEXT(const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT *pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *pCompatibility, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createImage(const VULKAN_HPP_NAMESPACE::ImageCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Image *pImage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizeiptr const void GLenum usage
void setCoarseSampleOrderNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV *pCustomSampleOrders, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type bindImageMemory(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setCoverageToColorLocationNV(uint32_t coverageToColorLocation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR *pSwapchains, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR(const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void writeBufferMarkerAMD(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV(const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV *pOpticalFlowImageFormatInfo, uint32_t *pFormatCount, VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBias(float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyRenderPass(VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR(const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR *pSurface, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void pipelineBarrier2KHR(const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type bindOpticalFlowSessionImageNV(VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, VULKAN_HPP_NAMESPACE::ImageView view, VULKAN_HPP_NAMESPACE::ImageLayout layout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR(uint32_t *pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setFrontFace(VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImageToBuffer(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_NAMESPACE_STRING
VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL(const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL *pInitializeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR *pModes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLenum GLsizei GLsizei GLint * values
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch > >::type createMicromapEXTUnique(const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type releasePerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
void bindPipeline(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createSemaphore(const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Semaphore *pSemaphore, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetDescriptorPool(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 *pCopyBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSamplerOpaqueCaptureDescriptorDataEXT(const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT *pInfo, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t *pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE *pPresentationTimings, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createFence(const VULKAN_HPP_NAMESPACE::FenceCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Fence *pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getProperties2() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch > >::type createDisplayModeKHRUnique(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setSampleLocationsEXT(const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT *pSampleLocationsInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setScissor(uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void freeCommandBuffers(VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer *pCommandBuffers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setSampleMaskEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, const VULKAN_HPP_NAMESPACE::SampleMask *pSampleMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageMemoryRequirements(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void nextSubpass2KHR(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void clearAttachments(uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment *pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect *pRects, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyCuFunctionNVX(VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueue2(const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 *pQueueInfo, VULKAN_HPP_NAMESPACE::Queue *pQueue, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyFence(VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Instance *pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT
void trimCommandPoolKHR(VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyValidationCacheEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setConservativeRasterizationModeEXT(VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructure, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector< StructureChain > getQueueFamilyProperties2() const
VULKAN_HPP_NODISCARD Result waitForFences(uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence *pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void decompressMemoryIndirectCountNV(VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result registerDisplayEventEXT(VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT *pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Fence *pFence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV(uint32_t *pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV *pCombinations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type compileDeferredNV(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setColorWriteEnableEXT(uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 *pColorWriteEnables, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCoverageModulationTableEnableNV(VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VK_HEADER_VERSION
void copyMicromapEXT(const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain< X, Y, Z...> getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createComputePipelines(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo *pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Pipeline *pPipelines, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getExternalFencePropertiesKHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties *pExternalFenceProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT *pTagInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthTestEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL(const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL *pMarkerInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createRenderPass2(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::RenderPass *pRenderPass, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Device, Dispatch > >::type createDeviceUnique(const VULKAN_HPP_NAMESPACE::DeviceCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void getSparseImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyImage(VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setColorBlendEquationEXT(uint32_t firstAttachment, uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT *pColorBlendEquations, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR(uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void *pData, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements *pInfo, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectNV(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroySurfaceKHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type resetCommandPool(VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV(const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV *pMemoryGetRemoteAddressInfo, VULKAN_HPP_NAMESPACE::RemoteAddressNV *pAddress, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutSizeEXT(VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, VULKAN_HPP_NAMESPACE::DeviceSize *pLayoutSizeInBytes, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DeviceSize getRayTracingShaderGroupStackSizeKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT(VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT *pSurfaceCapabilities, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate(const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *pDescriptorUpdateTemplate, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch > >::type createSwapchainKHRUnique(const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createCuFunctionNVX(const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX *pFunction, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void clearDepthStencilImage(VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *pRanges, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createRayTracingPipelinesKHRUnique(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createCommandPool(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::CommandPool *pCommandPool, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCullModeEXT(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDescriptorUpdateTemplate(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, uint32_t *pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR *pSurfaceFormats, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthBiasEnable(VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getQueryPoolResults(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV(const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV *pSession, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroy(const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyEvent(VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR(const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageViewAddressNVX(VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR(uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32 *pSupported, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getProperties(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type acquireDrmDisplayEXT(int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void endDebugUtilsLabelEXT(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyBuffer(VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getDescriptorSetLayoutSupportKHR(const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo *pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *pSupport, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageFormatProperties(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void nextSubpass2(const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo *pSubpassEndInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void dispatchIndirect(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::RenderPass, Dispatch > >::type createRenderPassUnique(const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createValidationCacheEXT(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT *pValidationCache, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::ImageView, Dispatch > >::type createImageViewUnique(const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result createEvent(const VULKAN_HPP_NAMESPACE::EventCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::Event *pEvent, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawMeshTasksIndirectCountNV(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setShadingRateImageEnableNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDeviceMask(uint32_t deviceMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImageToBuffer2KHR(const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 *pCopyImageToBufferInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch > >::type createCuModuleNVXUnique(const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyIndirectCommandsLayoutNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endQueryIndexedEXT(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirements2KHR(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV *pExternalImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR *pProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDescriptorBufferOffsetsEXT(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t *pBufferIndices, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyMemoryToAccelerationStructureKHR(const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexedIndirectCountAMD(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setLogicOpEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR(const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *pImageFormatProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setRepresentativeFragmentTestEnableNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageMemoryRequirementsKHR(const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements *pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 *pMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindVertexBuffers2(uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer *pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize *pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize *pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize *pStrides, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch > >::type createShaderModuleUnique(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setLineRasterizationModeEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setBlendConstants(const float blendConstants[4], Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getCheckpointData2NV(uint32_t *pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointData2NV *pCheckpointData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch > >::type createValidationCacheEXTUnique(const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setStencilWriteMask(VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
OpticalFlowSessionBindingPointNV
VULKAN_HPP_NODISCARD Result bindImageMemory2KHR(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getImageSparseMemoryRequirements2(const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *pSparseMemoryRequirements, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setDepthTestEnable(VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
ResultValueType< void >::type setPrivateDataEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
that also have some descendant prim *whose name begins with which in turn has a child named baz where *the predicate and *a name There is also one special expression reference
void setTessellationDomainOriginEXT(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getDrmDisplayEXT(int32_t drmFd, uint32_t connectorId, VULKAN_HPP_NAMESPACE::DisplayKHR *display, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Event, Dispatch > >::type createEventUnique(const VULKAN_HPP_NAMESPACE::EventCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Image, Dispatch > >::type createImageUnique(const VULKAN_HPP_NAMESPACE::ImageCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setAlphaToCoverageEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VkFlags VkQueryResultFlags
void getAccelerationStructureBuildSizesKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pBuildInfo, const uint32_t *pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR *pSizeInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void beginRenderPass(const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo *pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirectByteCountEXT(uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR(uint32_t queueFamilyIndex, uint32_t *pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR *pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR *pCounterDescriptions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch > >::type createPipelineLayoutUnique(const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
VULKAN_HPP_NODISCARD Result allocateMemory(const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo *pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory *pMemory, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< std::vector< UniqueHandle< VULKAN_HPP_NAMESPACE::Pipeline, Dispatch >, PipelineAllocator > > createRayTracingPipelinesNVUnique(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV > const &createInfos, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void debugReportMessageEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char *pLayerPrefix, const char *pMessage, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createShaderModule(const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule *pShaderModule, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setCullMode(VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch > >::type createHeadlessSurfaceEXTUnique(const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void setCheckpointNV(const void *pCheckpointMarker, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
PFN_vkVoidFunction getProcAddr(const char *pName, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result signalSemaphoreKHR(const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo *pSignalInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void traceRaysIndirectKHR(const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR *pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_INLINE ResultValueType< void >::type createResultValueType(Result result)
VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT *pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void buildMicromapsEXT(uint32_t infoCount, const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT *pInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void insertDebugUtilsLabelEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *pLabelInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void uninitializePerformanceApiINTEL(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR *const *ppBuildRangeInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValue< VULKAN_HPP_NAMESPACE::Pipeline > createComputePipeline(VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void copyMemoryToImageIndirectNV(VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers *pImageSubresources, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getQueueFamilyProperties(uint32_t *pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties *pQueueFamilyProperties, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result submit2KHR(uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 *pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
uint64_t getBufferOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
FragmentShadingRateCombinerOpKHR
void writeTimestamp2(VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS ResultValueType< void >::type setPerformanceConfigurationINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyAccelerationStructureNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void destroyDebugUtilsMessengerEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
Result freeDescriptorSets(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet *pDescriptorSets, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void bindPipelineShaderGroupNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result createDeferredOperationKHR(const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR *pDeferredOperation, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result bindBufferMemory2(uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo *pBindInfos, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< DataType >::type writeMicromapsPropertyEXT(VULKAN_HPP_NAMESPACE::ArrayProxy< const VULKAN_HPP_NAMESPACE::MicromapEXT > const µmaps, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
PerformanceParameterTypeINTEL
void pushDescriptorSetWithTemplateKHR(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void *pData, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void endQuery(VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result getFenceFdKHR(const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR *pGetFdInfo, int *pFd, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setSampleLocationsEnableEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void waitEvents(uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event *pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *pImageMemoryBarriers, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setViewportWithCount(uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport *pViewports, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void drawIndirectCount(VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setScissorWithCountEXT(uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D *pScissors, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void resetEvent2(VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void copyImage(VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy *pRegions, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLsizei const GLuint const GLintptr const GLsizei * strides
VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT(const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo *pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot *pPrivateDataSlot, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void setPatchControlPointsEXT(uint32_t patchControlPoints, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void releaseProfilingLockKHR(Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties(uint32_t *pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties *pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
uint64_t getMemoryOpaqueCaptureAddress(const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo *pInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
#define VULKAN_HPP_INLINE
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::Instance, Dispatch > >::type createInstanceUnique(const VULKAN_HPP_NAMESPACE::InstanceCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator, Dispatch const &d)
void destroySwapchainKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD ResultValueType< UniqueHandle< VULKAN_HPP_NAMESPACE::CommandPool, Dispatch > >::type createCommandPoolUnique(const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo &createInfo, Optional< const VULKAN_HPP_NAMESPACE::AllocationCallbacks > allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const
void destroyMicromapEXT(VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks *pAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
void getFeatures(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *pFeatures, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
GLint GLint GLint GLint GLint GLint GLint GLbitfield GLenum filter
void setDepthClipNegativeOneToOneEXT(VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
DebugUtilsMessageSeverityFlagBitsEXT
void setEvent2(VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo *pDependencyInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT(const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *pNameInfo, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT) const VULKAN_HPP_NOEXCEPT