From bc510f017d707263fe70a029895bbd1a41a7db6a Mon Sep 17 00:00:00 2001 From: Marijn Suijten Date: Sat, 8 May 2021 12:25:10 +0200 Subject: [PATCH] generator: Add support for vkFlags64 and update to 1.2.170 (#411) * Update Vulkan-Headers to 1.2.169 * generator: Add support for vkFlags64 Since Vulkan-Headers v1.2.170 with VK_KHR_synchronization2 there are now bitmasks/enums using 64-bits instead of the default 32. vk-parse has been updated to convey this info though the typedefs for these enumerations could be parsed as well. * generator: Insert underscores before trailing type number Enum types like `VkAccessFlags2KHR` are turned into `VK_ACCESS2` after demangling and `SHOUTY_SNAKE_CASE` conversion by Heck, but the enum variants for the type start with `VK_ACCESS_2` (or similar) which fails the `strip_suffix` and leads to long names to show up. Inserting an underscore here makes sure the match succeeds. * Update Vulkan-Headers to 1.2.170 --- ash/src/vk/bitflags.rs | 89 +++- ash/src/vk/const_debugs.rs | 225 +++++++++- ash/src/vk/definitions.rs | 880 ++++++++++++++++++++++++++++++++++++- ash/src/vk/extensions.rs | 522 +++++++++++++++++++++- generator/Cargo.toml | 10 +- generator/Vulkan-Headers | 2 +- generator/src/lib.rs | 117 +++-- 7 files changed, 1777 insertions(+), 68 deletions(-) diff --git a/ash/src/vk/bitflags.rs b/ash/src/vk/bitflags.rs index ce0e9ed..a4fd78d 100644 --- a/ash/src/vk/bitflags.rs +++ b/ash/src/vk/bitflags.rs @@ -145,7 +145,7 @@ impl BufferCreateFlags { pub const SPARSE_BINDING: Self = Self(0b1); #[doc = "Buffer should support sparse backing with partial residency"] pub const SPARSE_RESIDENCY: Self = Self(0b10); - #[doc = "Buffer should support constent data access to physical memory ranges mapped into multiple locations of sparse buffers"] + #[doc = "Buffer should support constant data access to physical memory ranges mapped into multiple locations of sparse buffers"] pub const SPARSE_ALIASED: Self = Self(0b100); } #[repr(transparent)] @@ -200,7 
+200,7 @@ impl ImageCreateFlags { pub const SPARSE_BINDING: Self = Self(0b1); #[doc = "Image should support sparse backing with partial residency"] pub const SPARSE_RESIDENCY: Self = Self(0b10); - #[doc = "Image should support constent data access to physical memory ranges mapped into multiple locations of sparse images"] + #[doc = "Image should support constant data access to physical memory ranges mapped into multiple locations of sparse images"] pub const SPARSE_ALIASED: Self = Self(0b100); #[doc = "Allows image views to have different format than the base image"] pub const MUTABLE_FORMAT: Self = Self(0b1000); @@ -961,3 +961,88 @@ impl ToolPurposeFlagsEXT { pub const ADDITIONAL_FEATURES: Self = Self(0b1000); pub const MODIFYING_FEATURES: Self = Self(0b1_0000); } +#[repr(transparent)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[doc = ""] +pub struct AccessFlags2KHR(pub(crate) Flags64); +vk_bitflags_wrapped!( + AccessFlags2KHR, + 0b111_0000_0000_0000_0001_1111_1111_1111_1111, + Flags64 +); +impl AccessFlags2KHR { + pub const NONE: Self = Self(0); + pub const INDIRECT_COMMAND_READ: Self = Self(0b1); + pub const INDEX_READ: Self = Self(0b10); + pub const VERTEX_ATTRIBUTE_READ: Self = Self(0b100); + pub const UNIFORM_READ: Self = Self(0b1000); + pub const INPUT_ATTACHMENT_READ: Self = Self(0b1_0000); + pub const SHADER_READ: Self = Self(0b10_0000); + pub const SHADER_WRITE: Self = Self(0b100_0000); + pub const COLOR_ATTACHMENT_READ: Self = Self(0b1000_0000); + pub const COLOR_ATTACHMENT_WRITE: Self = Self(0b1_0000_0000); + pub const DEPTH_STENCIL_ATTACHMENT_READ: Self = Self(0b10_0000_0000); + pub const DEPTH_STENCIL_ATTACHMENT_WRITE: Self = Self(0b100_0000_0000); + pub const TRANSFER_READ: Self = Self(0b1000_0000_0000); + pub const TRANSFER_WRITE: Self = Self(0b1_0000_0000_0000); + pub const HOST_READ: Self = Self(0b10_0000_0000_0000); + pub const HOST_WRITE: Self = Self(0b100_0000_0000_0000); + pub const MEMORY_READ: Self = 
Self(0b1000_0000_0000_0000); + pub const MEMORY_WRITE: Self = Self(0b1_0000_0000_0000_0000); + pub const SHADER_SAMPLED_READ: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000); + pub const SHADER_STORAGE_READ: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000); + pub const SHADER_STORAGE_WRITE: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000); +} +#[repr(transparent)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[doc = ""] +pub struct PipelineStageFlags2KHR(pub(crate) Flags64); +vk_bitflags_wrapped!( + PipelineStageFlags2KHR, + 0b111_1111_0000_0000_0000_0001_1111_1111_1111_1111, + Flags64 +); +impl PipelineStageFlags2KHR { + pub const NONE: Self = Self(0); + pub const TOP_OF_PIPE: Self = Self(0b1); + pub const DRAW_INDIRECT: Self = Self(0b10); + pub const VERTEX_INPUT: Self = Self(0b100); + pub const VERTEX_SHADER: Self = Self(0b1000); + pub const TESSELLATION_CONTROL_SHADER: Self = Self(0b1_0000); + pub const TESSELLATION_EVALUATION_SHADER: Self = Self(0b10_0000); + pub const GEOMETRY_SHADER: Self = Self(0b100_0000); + pub const FRAGMENT_SHADER: Self = Self(0b1000_0000); + pub const EARLY_FRAGMENT_TESTS: Self = Self(0b1_0000_0000); + pub const LATE_FRAGMENT_TESTS: Self = Self(0b10_0000_0000); + pub const COLOR_ATTACHMENT_OUTPUT: Self = Self(0b100_0000_0000); + pub const COMPUTE_SHADER: Self = Self(0b1000_0000_0000); + pub const ALL_TRANSFER: Self = Self(0b1_0000_0000_0000); + pub const TRANSFER: Self = Self::ALL_TRANSFER; + pub const BOTTOM_OF_PIPE: Self = Self(0b10_0000_0000_0000); + pub const HOST: Self = Self(0b100_0000_0000_0000); + pub const ALL_GRAPHICS: Self = Self(0b1000_0000_0000_0000); + pub const ALL_COMMANDS: Self = Self(0b1_0000_0000_0000_0000); + pub const COPY: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000); + pub const RESOLVE: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000); + pub const BLIT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000); + pub const CLEAR: Self = 
Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000); + pub const INDEX_INPUT: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000); + pub const VERTEX_ATTRIBUTE_INPUT: Self = + Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000); + pub const PRE_RASTERIZATION_SHADERS: Self = + Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000); +} +#[repr(transparent)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[doc = ""] +pub struct SubmitFlagsKHR(pub(crate) Flags); +vk_bitflags_wrapped!(SubmitFlagsKHR, 0b1, Flags); +impl SubmitFlagsKHR { + pub const PROTECTED: Self = Self(0b1); +} +#[repr(transparent)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[doc = ""] +pub struct EventCreateFlags(pub(crate) Flags); +vk_bitflags_wrapped!(EventCreateFlags, 0b0, Flags); +impl EventCreateFlags {} diff --git a/ash/src/vk/const_debugs.rs b/ash/src/vk/const_debugs.rs index a767228..7df14f5 100644 --- a/ash/src/vk/const_debugs.rs +++ b/ash/src/vk/const_debugs.rs @@ -2,15 +2,16 @@ use crate::vk::bitflags::*; use crate::vk::definitions::*; use crate::vk::enums::*; use std::fmt; -pub(crate) fn debug_flags( +pub(crate) fn debug_flags + Copy>( f: &mut fmt::Formatter, - known: &[(Flags, &'static str)], - value: Flags, + known: &[(Value, &'static str)], + value: Value, ) -> fmt::Result { let mut first = true; - let mut accum = value; - for (bit, name) in known { - if *bit != 0 && accum & *bit == *bit { + let mut accum = value.into(); + for &(bit, name) in known { + let bit = bit.into(); + if bit != 0 && accum & bit == bit { if !first { f.write_str(" | ")?; } @@ -189,6 +190,109 @@ impl fmt::Debug for AccessFlags { AccessFlags::COMMAND_PREPROCESS_WRITE_NV.0, "COMMAND_PREPROCESS_WRITE_NV", ), + (AccessFlags::NONE_KHR.0, "NONE_KHR"), + ]; + debug_flags(f, KNOWN, self.0) + } +} +impl fmt::Debug for AccessFlags2KHR { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + const KNOWN: &[(Flags64, &str)] = &[ + (AccessFlags2KHR::NONE.0, "NONE"), + ( 
+ AccessFlags2KHR::INDIRECT_COMMAND_READ.0, + "INDIRECT_COMMAND_READ", + ), + (AccessFlags2KHR::INDEX_READ.0, "INDEX_READ"), + ( + AccessFlags2KHR::VERTEX_ATTRIBUTE_READ.0, + "VERTEX_ATTRIBUTE_READ", + ), + (AccessFlags2KHR::UNIFORM_READ.0, "UNIFORM_READ"), + ( + AccessFlags2KHR::INPUT_ATTACHMENT_READ.0, + "INPUT_ATTACHMENT_READ", + ), + (AccessFlags2KHR::SHADER_READ.0, "SHADER_READ"), + (AccessFlags2KHR::SHADER_WRITE.0, "SHADER_WRITE"), + ( + AccessFlags2KHR::COLOR_ATTACHMENT_READ.0, + "COLOR_ATTACHMENT_READ", + ), + ( + AccessFlags2KHR::COLOR_ATTACHMENT_WRITE.0, + "COLOR_ATTACHMENT_WRITE", + ), + ( + AccessFlags2KHR::DEPTH_STENCIL_ATTACHMENT_READ.0, + "DEPTH_STENCIL_ATTACHMENT_READ", + ), + ( + AccessFlags2KHR::DEPTH_STENCIL_ATTACHMENT_WRITE.0, + "DEPTH_STENCIL_ATTACHMENT_WRITE", + ), + (AccessFlags2KHR::TRANSFER_READ.0, "TRANSFER_READ"), + (AccessFlags2KHR::TRANSFER_WRITE.0, "TRANSFER_WRITE"), + (AccessFlags2KHR::HOST_READ.0, "HOST_READ"), + (AccessFlags2KHR::HOST_WRITE.0, "HOST_WRITE"), + (AccessFlags2KHR::MEMORY_READ.0, "MEMORY_READ"), + (AccessFlags2KHR::MEMORY_WRITE.0, "MEMORY_WRITE"), + ( + AccessFlags2KHR::SHADER_SAMPLED_READ.0, + "SHADER_SAMPLED_READ", + ), + ( + AccessFlags2KHR::SHADER_STORAGE_READ.0, + "SHADER_STORAGE_READ", + ), + ( + AccessFlags2KHR::SHADER_STORAGE_WRITE.0, + "SHADER_STORAGE_WRITE", + ), + ( + AccessFlags2KHR::TRANSFORM_FEEDBACK_WRITE_EXT.0, + "TRANSFORM_FEEDBACK_WRITE_EXT", + ), + ( + AccessFlags2KHR::TRANSFORM_FEEDBACK_COUNTER_READ_EXT.0, + "TRANSFORM_FEEDBACK_COUNTER_READ_EXT", + ), + ( + AccessFlags2KHR::TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT.0, + "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT", + ), + ( + AccessFlags2KHR::CONDITIONAL_RENDERING_READ_EXT.0, + "CONDITIONAL_RENDERING_READ_EXT", + ), + ( + AccessFlags2KHR::COMMAND_PREPROCESS_READ_NV.0, + "COMMAND_PREPROCESS_READ_NV", + ), + ( + AccessFlags2KHR::COMMAND_PREPROCESS_WRITE_NV.0, + "COMMAND_PREPROCESS_WRITE_NV", + ), + ( + AccessFlags2KHR::FRAGMENT_SHADING_RATE_ATTACHMENT_READ.0, + 
"FRAGMENT_SHADING_RATE_ATTACHMENT_READ", + ), + ( + AccessFlags2KHR::ACCELERATION_STRUCTURE_READ.0, + "ACCELERATION_STRUCTURE_READ", + ), + ( + AccessFlags2KHR::ACCELERATION_STRUCTURE_WRITE.0, + "ACCELERATION_STRUCTURE_WRITE", + ), + ( + AccessFlags2KHR::FRAGMENT_DENSITY_MAP_READ_EXT.0, + "FRAGMENT_DENSITY_MAP_READ_EXT", + ), + ( + AccessFlags2KHR::COLOR_ATTACHMENT_READ_NONCOHERENT_EXT.0, + "COLOR_ATTACHMENT_READ_NONCOHERENT_EXT", + ), ]; debug_flags(f, KNOWN, self.0) } @@ -1227,7 +1331,7 @@ impl fmt::Debug for DynamicState { } impl fmt::Debug for EventCreateFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - const KNOWN: &[(Flags, &str)] = &[]; + const KNOWN: &[(Flags, &str)] = &[(EventCreateFlags::DEVICE_ONLY_KHR.0, "DEVICE_ONLY_KHR")]; debug_flags(f, KNOWN, self.0) } } @@ -1956,6 +2060,8 @@ impl fmt::Debug for ImageLayout { Self::SHARED_PRESENT_KHR => Some("SHARED_PRESENT_KHR"), Self::SHADING_RATE_OPTIMAL_NV => Some("SHADING_RATE_OPTIMAL_NV"), Self::FRAGMENT_DENSITY_MAP_OPTIMAL_EXT => Some("FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"), + Self::READ_ONLY_OPTIMAL_KHR => Some("READ_ONLY_OPTIMAL_KHR"), + Self::ATTACHMENT_OPTIMAL_KHR => Some("ATTACHMENT_OPTIMAL_KHR"), Self::DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL => { Some("DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL") } @@ -2629,6 +2735,7 @@ impl fmt::Debug for PipelineCreateFlags { PipelineCreateFlags::EARLY_RETURN_ON_FAILURE_EXT.0, "EARLY_RETURN_ON_FAILURE_EXT", ), + (PipelineCreateFlags::RESERVED_20_NV.0, "RESERVED_20_NV"), ( PipelineCreateFlags::VIEW_INDEX_FROM_DEVICE_INDEX.0, "VIEW_INDEX_FROM_DEVICE_INDEX", @@ -2820,6 +2927,90 @@ impl fmt::Debug for PipelineStageFlags { PipelineStageFlags::COMMAND_PREPROCESS_NV.0, "COMMAND_PREPROCESS_NV", ), + (PipelineStageFlags::NONE_KHR.0, "NONE_KHR"), + ]; + debug_flags(f, KNOWN, self.0) + } +} +impl fmt::Debug for PipelineStageFlags2KHR { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + const KNOWN: &[(Flags64, &str)] = &[ + (PipelineStageFlags2KHR::NONE.0, 
"NONE"), + (PipelineStageFlags2KHR::TOP_OF_PIPE.0, "TOP_OF_PIPE"), + (PipelineStageFlags2KHR::DRAW_INDIRECT.0, "DRAW_INDIRECT"), + (PipelineStageFlags2KHR::VERTEX_INPUT.0, "VERTEX_INPUT"), + (PipelineStageFlags2KHR::VERTEX_SHADER.0, "VERTEX_SHADER"), + ( + PipelineStageFlags2KHR::TESSELLATION_CONTROL_SHADER.0, + "TESSELLATION_CONTROL_SHADER", + ), + ( + PipelineStageFlags2KHR::TESSELLATION_EVALUATION_SHADER.0, + "TESSELLATION_EVALUATION_SHADER", + ), + (PipelineStageFlags2KHR::GEOMETRY_SHADER.0, "GEOMETRY_SHADER"), + (PipelineStageFlags2KHR::FRAGMENT_SHADER.0, "FRAGMENT_SHADER"), + ( + PipelineStageFlags2KHR::EARLY_FRAGMENT_TESTS.0, + "EARLY_FRAGMENT_TESTS", + ), + ( + PipelineStageFlags2KHR::LATE_FRAGMENT_TESTS.0, + "LATE_FRAGMENT_TESTS", + ), + ( + PipelineStageFlags2KHR::COLOR_ATTACHMENT_OUTPUT.0, + "COLOR_ATTACHMENT_OUTPUT", + ), + (PipelineStageFlags2KHR::COMPUTE_SHADER.0, "COMPUTE_SHADER"), + (PipelineStageFlags2KHR::ALL_TRANSFER.0, "ALL_TRANSFER"), + (PipelineStageFlags2KHR::BOTTOM_OF_PIPE.0, "BOTTOM_OF_PIPE"), + (PipelineStageFlags2KHR::HOST.0, "HOST"), + (PipelineStageFlags2KHR::ALL_GRAPHICS.0, "ALL_GRAPHICS"), + (PipelineStageFlags2KHR::ALL_COMMANDS.0, "ALL_COMMANDS"), + (PipelineStageFlags2KHR::COPY.0, "COPY"), + (PipelineStageFlags2KHR::RESOLVE.0, "RESOLVE"), + (PipelineStageFlags2KHR::BLIT.0, "BLIT"), + (PipelineStageFlags2KHR::CLEAR.0, "CLEAR"), + (PipelineStageFlags2KHR::INDEX_INPUT.0, "INDEX_INPUT"), + ( + PipelineStageFlags2KHR::VERTEX_ATTRIBUTE_INPUT.0, + "VERTEX_ATTRIBUTE_INPUT", + ), + ( + PipelineStageFlags2KHR::PRE_RASTERIZATION_SHADERS.0, + "PRE_RASTERIZATION_SHADERS", + ), + ( + PipelineStageFlags2KHR::TRANSFORM_FEEDBACK_EXT.0, + "TRANSFORM_FEEDBACK_EXT", + ), + ( + PipelineStageFlags2KHR::CONDITIONAL_RENDERING_EXT.0, + "CONDITIONAL_RENDERING_EXT", + ), + ( + PipelineStageFlags2KHR::COMMAND_PREPROCESS_NV.0, + "COMMAND_PREPROCESS_NV", + ), + ( + PipelineStageFlags2KHR::FRAGMENT_SHADING_RATE_ATTACHMENT.0, + "FRAGMENT_SHADING_RATE_ATTACHMENT", 
+ ), + ( + PipelineStageFlags2KHR::ACCELERATION_STRUCTURE_BUILD.0, + "ACCELERATION_STRUCTURE_BUILD", + ), + ( + PipelineStageFlags2KHR::RAY_TRACING_SHADER.0, + "RAY_TRACING_SHADER", + ), + ( + PipelineStageFlags2KHR::FRAGMENT_DENSITY_PROCESS_EXT.0, + "FRAGMENT_DENSITY_PROCESS_EXT", + ), + (PipelineStageFlags2KHR::TASK_SHADER_NV.0, "TASK_SHADER_NV"), + (PipelineStageFlags2KHR::MESH_SHADER_NV.0, "MESH_SHADER_NV"), ]; debug_flags(f, KNOWN, self.0) } @@ -4271,6 +4462,20 @@ impl fmt::Debug for StructureType { Some("DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV") } Self::RESERVED_QCOM => Some("RESERVED_QCOM"), + Self::MEMORY_BARRIER_2_KHR => Some("MEMORY_BARRIER_2_KHR"), + Self::BUFFER_MEMORY_BARRIER_2_KHR => Some("BUFFER_MEMORY_BARRIER_2_KHR"), + Self::IMAGE_MEMORY_BARRIER_2_KHR => Some("IMAGE_MEMORY_BARRIER_2_KHR"), + Self::DEPENDENCY_INFO_KHR => Some("DEPENDENCY_INFO_KHR"), + Self::SUBMIT_INFO_2_KHR => Some("SUBMIT_INFO_2_KHR"), + Self::SEMAPHORE_SUBMIT_INFO_KHR => Some("SEMAPHORE_SUBMIT_INFO_KHR"), + Self::COMMAND_BUFFER_SUBMIT_INFO_KHR => Some("COMMAND_BUFFER_SUBMIT_INFO_KHR"), + Self::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR => { + Some("PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR") + } + Self::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV => { + Some("QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV") + } + Self::CHECKPOINT_DATA_2_NV => Some("CHECKPOINT_DATA_2_NV"), Self::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR => { Some("PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR") } @@ -4578,6 +4783,12 @@ impl fmt::Debug for SubgroupFeatureFlags { debug_flags(f, KNOWN, self.0) } } +impl fmt::Debug for SubmitFlagsKHR { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + const KNOWN: &[(Flags, &str)] = &[(SubmitFlagsKHR::PROTECTED.0, "PROTECTED")]; + debug_flags(f, KNOWN, self.0) + } +} impl fmt::Debug for SubpassContents { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let name = match *self { diff --git a/ash/src/vk/definitions.rs 
b/ash/src/vk/definitions.rs index c2955d1..344a82f 100644 --- a/ash/src/vk/definitions.rs +++ b/ash/src/vk/definitions.rs @@ -9,7 +9,7 @@ use std::os::raw::*; pub const API_VERSION_1_0: u32 = crate::vk::make_version(1, 0, 0); pub const API_VERSION_1_1: u32 = crate::vk::make_version(1, 1, 0); pub const API_VERSION_1_2: u32 = crate::vk::make_version(1, 2, 0); -pub const HEADER_VERSION: u32 = 168u32; +pub const HEADER_VERSION: u32 = 170u32; pub const HEADER_VERSION_COMPLETE: u32 = crate::vk::make_version(1, 2, HEADER_VERSION); #[doc = ""] pub type SampleMask = u32; @@ -17,6 +17,8 @@ pub type SampleMask = u32; pub type Bool32 = u32; #[doc = ""] pub type Flags = u32; +#[doc = ""] +pub type Flags64 = u64; #[doc = ""] pub type DeviceSize = u64; #[doc = ""] @@ -98,11 +100,6 @@ pub struct SemaphoreCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(SemaphoreCreateFlags, 0b0, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[doc = ""] -pub struct EventCreateFlags(pub(crate) Flags); -vk_bitflags_wrapped!(EventCreateFlags, 0b0, Flags); -#[repr(transparent)] -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryMapFlags(pub(crate) Flags); vk_bitflags_wrapped!(MemoryMapFlags, 0b0, Flags); @@ -12244,6 +12241,8 @@ pub struct Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> { } unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'_> {} unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNV {} +unsafe impl ExtendsSubmitInfo2KHR for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'_> {} +unsafe impl ExtendsSubmitInfo2KHR for Win32KeyedMutexAcquireReleaseInfoNV {} impl<'a> ::std::ops::Deref for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> { type Target = Win32KeyedMutexAcquireReleaseInfoNV; fn deref(&self) -> &Self::Target { @@ -15700,6 +15699,8 @@ pub struct Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> { } unsafe impl ExtendsSubmitInfo for 
Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'_> {} unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoKHR {} +unsafe impl ExtendsSubmitInfo2KHR for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'_> {} +unsafe impl ExtendsSubmitInfo2KHR for Win32KeyedMutexAcquireReleaseInfoKHR {} impl<'a> ::std::ops::Deref for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> { type Target = Win32KeyedMutexAcquireReleaseInfoKHR; fn deref(&self) -> &Self::Target { @@ -22685,6 +22686,8 @@ pub struct SampleLocationsInfoEXTBuilder<'a> { } unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXTBuilder<'_> {} unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXT {} +unsafe impl ExtendsImageMemoryBarrier2KHR for SampleLocationsInfoEXTBuilder<'_> {} +unsafe impl ExtendsImageMemoryBarrier2KHR for SampleLocationsInfoEXT {} impl<'a> ::std::ops::Deref for SampleLocationsInfoEXTBuilder<'a> { type Target = SampleLocationsInfoEXT; fn deref(&self) -> &Self::Target { @@ -36555,6 +36558,8 @@ pub struct PerformanceQuerySubmitInfoKHRBuilder<'a> { } unsafe impl ExtendsSubmitInfo for PerformanceQuerySubmitInfoKHRBuilder<'_> {} unsafe impl ExtendsSubmitInfo for PerformanceQuerySubmitInfoKHR {} +unsafe impl ExtendsSubmitInfo2KHR for PerformanceQuerySubmitInfoKHRBuilder<'_> {} +unsafe impl ExtendsSubmitInfo2KHR for PerformanceQuerySubmitInfoKHR {} impl<'a> ::std::ops::Deref for PerformanceQuerySubmitInfoKHRBuilder<'a> { type Target = PerformanceQuerySubmitInfoKHR; fn deref(&self) -> &Self::Target { @@ -45573,3 +45578,866 @@ impl<'a> MutableDescriptorTypeCreateInfoVALVEBuilder<'a> { self.inner } } +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct MemoryBarrier2KHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub src_stage_mask: PipelineStageFlags2KHR, + pub src_access_mask: AccessFlags2KHR, + pub dst_stage_mask: PipelineStageFlags2KHR, + pub dst_access_mask: AccessFlags2KHR, +} +impl ::std::default::Default for MemoryBarrier2KHR { + fn 
default() -> MemoryBarrier2KHR { + MemoryBarrier2KHR { + s_type: StructureType::MEMORY_BARRIER_2_KHR, + p_next: ::std::ptr::null(), + src_stage_mask: PipelineStageFlags2KHR::default(), + src_access_mask: AccessFlags2KHR::default(), + dst_stage_mask: PipelineStageFlags2KHR::default(), + dst_access_mask: AccessFlags2KHR::default(), + } + } +} +impl MemoryBarrier2KHR { + pub fn builder<'a>() -> MemoryBarrier2KHRBuilder<'a> { + MemoryBarrier2KHRBuilder { + inner: MemoryBarrier2KHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct MemoryBarrier2KHRBuilder<'a> { + inner: MemoryBarrier2KHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +unsafe impl ExtendsSubpassDependency2 for MemoryBarrier2KHRBuilder<'_> {} +unsafe impl ExtendsSubpassDependency2 for MemoryBarrier2KHR {} +impl<'a> ::std::ops::Deref for MemoryBarrier2KHRBuilder<'a> { + type Target = MemoryBarrier2KHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for MemoryBarrier2KHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> MemoryBarrier2KHRBuilder<'a> { + pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.src_stage_mask = src_stage_mask; + self + } + pub fn src_access_mask(mut self, src_access_mask: AccessFlags2KHR) -> Self { + self.inner.src_access_mask = src_access_mask; + self + } + pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.dst_stage_mask = dst_stage_mask; + self + } + pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2KHR) -> Self { + self.inner.dst_access_mask = dst_access_mask; + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! 
Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> MemoryBarrier2KHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct ImageMemoryBarrier2KHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub src_stage_mask: PipelineStageFlags2KHR, + pub src_access_mask: AccessFlags2KHR, + pub dst_stage_mask: PipelineStageFlags2KHR, + pub dst_access_mask: AccessFlags2KHR, + pub old_layout: ImageLayout, + pub new_layout: ImageLayout, + pub src_queue_family_index: u32, + pub dst_queue_family_index: u32, + pub image: Image, + pub subresource_range: ImageSubresourceRange, +} +impl ::std::default::Default for ImageMemoryBarrier2KHR { + fn default() -> ImageMemoryBarrier2KHR { + ImageMemoryBarrier2KHR { + s_type: StructureType::IMAGE_MEMORY_BARRIER_2_KHR, + p_next: ::std::ptr::null(), + src_stage_mask: PipelineStageFlags2KHR::default(), + src_access_mask: AccessFlags2KHR::default(), + dst_stage_mask: PipelineStageFlags2KHR::default(), + dst_access_mask: AccessFlags2KHR::default(), + old_layout: ImageLayout::default(), + new_layout: ImageLayout::default(), + src_queue_family_index: u32::default(), + dst_queue_family_index: u32::default(), + image: Image::default(), + subresource_range: ImageSubresourceRange::default(), + } + } +} +impl ImageMemoryBarrier2KHR { + pub fn builder<'a>() -> ImageMemoryBarrier2KHRBuilder<'a> { + ImageMemoryBarrier2KHRBuilder { + inner: ImageMemoryBarrier2KHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct ImageMemoryBarrier2KHRBuilder<'a> { + inner: ImageMemoryBarrier2KHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsImageMemoryBarrier2KHR {} +impl<'a> ::std::ops::Deref for ImageMemoryBarrier2KHRBuilder<'a> { + type Target = ImageMemoryBarrier2KHR; + fn deref(&self) -> &Self::Target { + &self.inner + } 
+} +impl<'a> ::std::ops::DerefMut for ImageMemoryBarrier2KHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> ImageMemoryBarrier2KHRBuilder<'a> { + pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.src_stage_mask = src_stage_mask; + self + } + pub fn src_access_mask(mut self, src_access_mask: AccessFlags2KHR) -> Self { + self.inner.src_access_mask = src_access_mask; + self + } + pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.dst_stage_mask = dst_stage_mask; + self + } + pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2KHR) -> Self { + self.inner.dst_access_mask = dst_access_mask; + self + } + pub fn old_layout(mut self, old_layout: ImageLayout) -> Self { + self.inner.old_layout = old_layout; + self + } + pub fn new_layout(mut self, new_layout: ImageLayout) -> Self { + self.inner.new_layout = new_layout; + self + } + pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self { + self.inner.src_queue_family_index = src_queue_family_index; + self + } + pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self { + self.inner.dst_queue_family_index = dst_queue_family_index; + self + } + pub fn image(mut self, image: Image) -> Self { + self.inner.image = image; + self + } + pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self { + self.inner.subresource_range = subresource_range; + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] + #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> ImageMemoryBarrier2KHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct BufferMemoryBarrier2KHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub src_stage_mask: PipelineStageFlags2KHR, + pub src_access_mask: AccessFlags2KHR, + pub dst_stage_mask: PipelineStageFlags2KHR, + pub dst_access_mask: AccessFlags2KHR, + pub src_queue_family_index: u32, + pub dst_queue_family_index: u32, + pub buffer: Buffer, + pub offset: DeviceSize, + pub size: DeviceSize, +} +impl ::std::default::Default for BufferMemoryBarrier2KHR { + fn default() -> BufferMemoryBarrier2KHR { + BufferMemoryBarrier2KHR { + s_type: StructureType::BUFFER_MEMORY_BARRIER_2_KHR, + p_next: ::std::ptr::null(), + src_stage_mask: PipelineStageFlags2KHR::default(), + src_access_mask: AccessFlags2KHR::default(), + dst_stage_mask: PipelineStageFlags2KHR::default(), + dst_access_mask: AccessFlags2KHR::default(), + src_queue_family_index: u32::default(), + dst_queue_family_index: u32::default(), + buffer: Buffer::default(), + offset: DeviceSize::default(), + size: DeviceSize::default(), + } + } +} +impl BufferMemoryBarrier2KHR { + pub fn builder<'a>() -> 
BufferMemoryBarrier2KHRBuilder<'a> { + BufferMemoryBarrier2KHRBuilder { + inner: BufferMemoryBarrier2KHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct BufferMemoryBarrier2KHRBuilder<'a> { + inner: BufferMemoryBarrier2KHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsBufferMemoryBarrier2KHR {} +impl<'a> ::std::ops::Deref for BufferMemoryBarrier2KHRBuilder<'a> { + type Target = BufferMemoryBarrier2KHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for BufferMemoryBarrier2KHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> BufferMemoryBarrier2KHRBuilder<'a> { + pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.src_stage_mask = src_stage_mask; + self + } + pub fn src_access_mask(mut self, src_access_mask: AccessFlags2KHR) -> Self { + self.inner.src_access_mask = src_access_mask; + self + } + pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.dst_stage_mask = dst_stage_mask; + self + } + pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2KHR) -> Self { + self.inner.dst_access_mask = dst_access_mask; + self + } + pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self { + self.inner.src_queue_family_index = src_queue_family_index; + self + } + pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self { + self.inner.dst_queue_family_index = dst_queue_family_index; + self + } + pub fn buffer(mut self, buffer: Buffer) -> Self { + self.inner.buffer = buffer; + self + } + pub fn offset(mut self, offset: DeviceSize) -> Self { + self.inner.offset = offset; + self + } + pub fn size(mut self, size: DeviceSize) -> Self { + self.inner.size = size; + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. 
This"] + #[doc = r" method only exists on structs that can be passed to a function directly. Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> BufferMemoryBarrier2KHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct DependencyInfoKHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub dependency_flags: DependencyFlags, + pub memory_barrier_count: u32, + pub p_memory_barriers: *const MemoryBarrier2KHR, + pub buffer_memory_barrier_count: u32, + pub p_buffer_memory_barriers: *const BufferMemoryBarrier2KHR, + pub image_memory_barrier_count: u32, + pub p_image_memory_barriers: *const ImageMemoryBarrier2KHR, +} +impl ::std::default::Default for DependencyInfoKHR { + fn default() -> DependencyInfoKHR { + DependencyInfoKHR { + s_type: StructureType::DEPENDENCY_INFO_KHR, + p_next: ::std::ptr::null(), + dependency_flags: DependencyFlags::default(), + memory_barrier_count: u32::default(), + p_memory_barriers: ::std::ptr::null(), + buffer_memory_barrier_count: u32::default(), + p_buffer_memory_barriers: ::std::ptr::null(), + image_memory_barrier_count: u32::default(), + p_image_memory_barriers: ::std::ptr::null(), + } + } +} +impl DependencyInfoKHR { + pub fn 
builder<'a>() -> DependencyInfoKHRBuilder<'a> { + DependencyInfoKHRBuilder { + inner: DependencyInfoKHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct DependencyInfoKHRBuilder<'a> { + inner: DependencyInfoKHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsDependencyInfoKHR {} +impl<'a> ::std::ops::Deref for DependencyInfoKHRBuilder<'a> { + type Target = DependencyInfoKHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for DependencyInfoKHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> DependencyInfoKHRBuilder<'a> { + pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self { + self.inner.dependency_flags = dependency_flags; + self + } + pub fn memory_barriers(mut self, memory_barriers: &'a [MemoryBarrier2KHR]) -> Self { + self.inner.memory_barrier_count = memory_barriers.len() as _; + self.inner.p_memory_barriers = memory_barriers.as_ptr(); + self + } + pub fn buffer_memory_barriers( + mut self, + buffer_memory_barriers: &'a [BufferMemoryBarrier2KHR], + ) -> Self { + self.inner.buffer_memory_barrier_count = buffer_memory_barriers.len() as _; + self.inner.p_buffer_memory_barriers = buffer_memory_barriers.as_ptr(); + self + } + pub fn image_memory_barriers( + mut self, + image_memory_barriers: &'a [ImageMemoryBarrier2KHR], + ) -> Self { + self.inner.image_memory_barrier_count = image_memory_barriers.len() as _; + self.inner.p_image_memory_barriers = image_memory_barriers.as_ptr(); + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] + #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> DependencyInfoKHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct SemaphoreSubmitInfoKHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub semaphore: Semaphore, + pub value: u64, + pub stage_mask: PipelineStageFlags2KHR, + pub device_index: u32, +} +impl ::std::default::Default for SemaphoreSubmitInfoKHR { + fn default() -> SemaphoreSubmitInfoKHR { + SemaphoreSubmitInfoKHR { + s_type: StructureType::SEMAPHORE_SUBMIT_INFO_KHR, + p_next: ::std::ptr::null(), + semaphore: Semaphore::default(), + value: u64::default(), + stage_mask: PipelineStageFlags2KHR::default(), + device_index: u32::default(), + } + } +} +impl SemaphoreSubmitInfoKHR { + pub fn builder<'a>() -> SemaphoreSubmitInfoKHRBuilder<'a> { + SemaphoreSubmitInfoKHRBuilder { + inner: SemaphoreSubmitInfoKHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct SemaphoreSubmitInfoKHRBuilder<'a> { + inner: SemaphoreSubmitInfoKHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsSemaphoreSubmitInfoKHR {} +impl<'a> ::std::ops::Deref for SemaphoreSubmitInfoKHRBuilder<'a> { + type Target = 
SemaphoreSubmitInfoKHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for SemaphoreSubmitInfoKHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> SemaphoreSubmitInfoKHRBuilder<'a> { + pub fn semaphore(mut self, semaphore: Semaphore) -> Self { + self.inner.semaphore = semaphore; + self + } + pub fn value(mut self, value: u64) -> Self { + self.inner.value = value; + self + } + pub fn stage_mask(mut self, stage_mask: PipelineStageFlags2KHR) -> Self { + self.inner.stage_mask = stage_mask; + self + } + pub fn device_index(mut self, device_index: u32) -> Self { + self.inner.device_index = device_index; + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] + #[doc = r" method only exists on structs that can be passed to a function directly. Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! 
Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> SemaphoreSubmitInfoKHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct CommandBufferSubmitInfoKHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub command_buffer: CommandBuffer, + pub device_mask: u32, +} +impl ::std::default::Default for CommandBufferSubmitInfoKHR { + fn default() -> CommandBufferSubmitInfoKHR { + CommandBufferSubmitInfoKHR { + s_type: StructureType::COMMAND_BUFFER_SUBMIT_INFO_KHR, + p_next: ::std::ptr::null(), + command_buffer: CommandBuffer::default(), + device_mask: u32::default(), + } + } +} +impl CommandBufferSubmitInfoKHR { + pub fn builder<'a>() -> CommandBufferSubmitInfoKHRBuilder<'a> { + CommandBufferSubmitInfoKHRBuilder { + inner: CommandBufferSubmitInfoKHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct CommandBufferSubmitInfoKHRBuilder<'a> { + inner: CommandBufferSubmitInfoKHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsCommandBufferSubmitInfoKHR {} +impl<'a> ::std::ops::Deref for CommandBufferSubmitInfoKHRBuilder<'a> { + type Target = CommandBufferSubmitInfoKHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for CommandBufferSubmitInfoKHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> CommandBufferSubmitInfoKHRBuilder<'a> { + pub fn command_buffer(mut self, command_buffer: CommandBuffer) -> Self { + self.inner.command_buffer = command_buffer; + self + } + pub fn device_mask(mut self, device_mask: u32) -> Self { + self.inner.device_mask = device_mask; + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. 
This"] + #[doc = r" method only exists on structs that can be passed to a function directly. Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> CommandBufferSubmitInfoKHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct SubmitInfo2KHR { + pub s_type: StructureType, + pub p_next: *const c_void, + pub flags: SubmitFlagsKHR, + pub wait_semaphore_info_count: u32, + pub p_wait_semaphore_infos: *const SemaphoreSubmitInfoKHR, + pub command_buffer_info_count: u32, + pub p_command_buffer_infos: *const CommandBufferSubmitInfoKHR, + pub signal_semaphore_info_count: u32, + pub p_signal_semaphore_infos: *const SemaphoreSubmitInfoKHR, +} +impl ::std::default::Default for SubmitInfo2KHR { + fn default() -> SubmitInfo2KHR { + SubmitInfo2KHR { + s_type: StructureType::SUBMIT_INFO_2_KHR, + p_next: ::std::ptr::null(), + flags: SubmitFlagsKHR::default(), + wait_semaphore_info_count: u32::default(), + p_wait_semaphore_infos: ::std::ptr::null(), + command_buffer_info_count: u32::default(), + p_command_buffer_infos: ::std::ptr::null(), + signal_semaphore_info_count: u32::default(), + p_signal_semaphore_infos: ::std::ptr::null(), + } + } +} +impl SubmitInfo2KHR { + pub fn builder<'a>() -> 
SubmitInfo2KHRBuilder<'a> { + SubmitInfo2KHRBuilder { + inner: SubmitInfo2KHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct SubmitInfo2KHRBuilder<'a> { + inner: SubmitInfo2KHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsSubmitInfo2KHR {} +impl<'a> ::std::ops::Deref for SubmitInfo2KHRBuilder<'a> { + type Target = SubmitInfo2KHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for SubmitInfo2KHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> SubmitInfo2KHRBuilder<'a> { + pub fn flags(mut self, flags: SubmitFlagsKHR) -> Self { + self.inner.flags = flags; + self + } + pub fn wait_semaphore_infos( + mut self, + wait_semaphore_infos: &'a [SemaphoreSubmitInfoKHR], + ) -> Self { + self.inner.wait_semaphore_info_count = wait_semaphore_infos.len() as _; + self.inner.p_wait_semaphore_infos = wait_semaphore_infos.as_ptr(); + self + } + pub fn command_buffer_infos( + mut self, + command_buffer_infos: &'a [CommandBufferSubmitInfoKHR], + ) -> Self { + self.inner.command_buffer_info_count = command_buffer_infos.len() as _; + self.inner.p_command_buffer_infos = command_buffer_infos.as_ptr(); + self + } + pub fn signal_semaphore_infos( + mut self, + signal_semaphore_infos: &'a [SemaphoreSubmitInfoKHR], + ) -> Self { + self.inner.signal_semaphore_info_count = signal_semaphore_infos.len() as _; + self.inner.p_signal_semaphore_infos = signal_semaphore_infos.as_ptr(); + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] + #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> SubmitInfo2KHR { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct QueueFamilyCheckpointProperties2NV { + pub s_type: StructureType, + pub p_next: *mut c_void, + pub checkpoint_execution_stage_mask: PipelineStageFlags2KHR, +} +impl ::std::default::Default for QueueFamilyCheckpointProperties2NV { + fn default() -> QueueFamilyCheckpointProperties2NV { + QueueFamilyCheckpointProperties2NV { + s_type: StructureType::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + p_next: ::std::ptr::null_mut(), + checkpoint_execution_stage_mask: PipelineStageFlags2KHR::default(), + } + } +} +impl QueueFamilyCheckpointProperties2NV { + pub fn builder<'a>() -> QueueFamilyCheckpointProperties2NVBuilder<'a> { + QueueFamilyCheckpointProperties2NVBuilder { + inner: QueueFamilyCheckpointProperties2NV::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct QueueFamilyCheckpointProperties2NVBuilder<'a> { + inner: QueueFamilyCheckpointProperties2NV, + marker: ::std::marker::PhantomData<&'a ()>, +} +unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointProperties2NVBuilder<'_> {} +unsafe impl 
ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointProperties2NV {} +impl<'a> ::std::ops::Deref for QueueFamilyCheckpointProperties2NVBuilder<'a> { + type Target = QueueFamilyCheckpointProperties2NV; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for QueueFamilyCheckpointProperties2NVBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> QueueFamilyCheckpointProperties2NVBuilder<'a> { + pub fn checkpoint_execution_stage_mask( + mut self, + checkpoint_execution_stage_mask: PipelineStageFlags2KHR, + ) -> Self { + self.inner.checkpoint_execution_stage_mask = checkpoint_execution_stage_mask; + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> QueueFamilyCheckpointProperties2NV { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct CheckpointData2NV { + pub s_type: StructureType, + pub p_next: *mut c_void, + pub stage: PipelineStageFlags2KHR, + pub p_checkpoint_marker: *mut c_void, +} +impl ::std::default::Default for CheckpointData2NV { + fn default() -> CheckpointData2NV { + CheckpointData2NV { + s_type: StructureType::CHECKPOINT_DATA_2_NV, + p_next: ::std::ptr::null_mut(), + stage: PipelineStageFlags2KHR::default(), + p_checkpoint_marker: ::std::ptr::null_mut(), + } + } +} +impl CheckpointData2NV { + pub fn builder<'a>() -> CheckpointData2NVBuilder<'a> { + CheckpointData2NVBuilder { + inner: CheckpointData2NV::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct CheckpointData2NVBuilder<'a> { + inner: CheckpointData2NV, + marker: ::std::marker::PhantomData<&'a ()>, +} +pub unsafe trait ExtendsCheckpointData2NV {} +impl<'a> ::std::ops::Deref for 
CheckpointData2NVBuilder<'a> { + type Target = CheckpointData2NV; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for CheckpointData2NVBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> CheckpointData2NVBuilder<'a> { + pub fn stage(mut self, stage: PipelineStageFlags2KHR) -> Self { + self.inner.stage = stage; + self + } + pub fn checkpoint_marker(mut self, checkpoint_marker: *mut c_void) -> Self { + self.inner.p_checkpoint_marker = checkpoint_marker; + self + } + #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] + #[doc = r" method only exists on structs that can be passed to a function directly. Only"] + #[doc = r" valid extension structs can be pushed into the chain."] + #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"] + #[doc = r" chain will look like `A -> D -> B -> C`."] + pub fn push_next(mut self, next: &'a mut T) -> Self { + unsafe { + let next_ptr = next as *mut T as *mut BaseOutStructure; + let last_next = ptr_chain_iter(next).last().unwrap(); + (*last_next).p_next = self.inner.p_next as _; + self.inner.p_next = next_ptr as _; + } + self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! 
Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> CheckpointData2NV { + self.inner + } +} +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[doc = ""] +pub struct PhysicalDeviceSynchronization2FeaturesKHR { + pub s_type: StructureType, + pub p_next: *mut c_void, + pub synchronization2: Bool32, +} +impl ::std::default::Default for PhysicalDeviceSynchronization2FeaturesKHR { + fn default() -> PhysicalDeviceSynchronization2FeaturesKHR { + PhysicalDeviceSynchronization2FeaturesKHR { + s_type: StructureType::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + p_next: ::std::ptr::null_mut(), + synchronization2: Bool32::default(), + } + } +} +impl PhysicalDeviceSynchronization2FeaturesKHR { + pub fn builder<'a>() -> PhysicalDeviceSynchronization2FeaturesKHRBuilder<'a> { + PhysicalDeviceSynchronization2FeaturesKHRBuilder { + inner: PhysicalDeviceSynchronization2FeaturesKHR::default(), + marker: ::std::marker::PhantomData, + } + } +} +#[repr(transparent)] +pub struct PhysicalDeviceSynchronization2FeaturesKHRBuilder<'a> { + inner: PhysicalDeviceSynchronization2FeaturesKHR, + marker: ::std::marker::PhantomData<&'a ()>, +} +unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSynchronization2FeaturesKHRBuilder<'_> {} +unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSynchronization2FeaturesKHR {} +impl<'a> ::std::ops::Deref for PhysicalDeviceSynchronization2FeaturesKHRBuilder<'a> { + type Target = PhysicalDeviceSynchronization2FeaturesKHR; + fn deref(&self) -> &Self::Target { + &self.inner + } +} +impl<'a> ::std::ops::DerefMut for PhysicalDeviceSynchronization2FeaturesKHRBuilder<'a> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} +impl<'a> PhysicalDeviceSynchronization2FeaturesKHRBuilder<'a> { + pub fn synchronization2(mut self, synchronization2: bool) -> Self { + self.inner.synchronization2 = synchronization2.into(); + 
self + } + #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"] + #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"] + #[doc = r" so references to builders can be passed directly to Vulkan functions."] + pub fn build(self) -> PhysicalDeviceSynchronization2FeaturesKHR { + self.inner + } +} diff --git a/ash/src/vk/extensions.rs b/ash/src/vk/extensions.rs index b07484c..3e1e5f9 100644 --- a/ash/src/vk/extensions.rs +++ b/ash/src/vk/extensions.rs @@ -23254,28 +23254,526 @@ impl AmdExtension314Fn { AmdExtension314Fn {} } } -impl AmdExtension315Fn { +impl KhrSynchronization2Fn { pub fn name() -> &'static ::std::ffi::CStr { - ::std::ffi::CStr::from_bytes_with_nul(b"VK_AMD_extension_315\0") + ::std::ffi::CStr::from_bytes_with_nul(b"VK_KHR_synchronization2\0") .expect("Wrong extension string") } - pub const SPEC_VERSION: u32 = 0u32; + pub const SPEC_VERSION: u32 = 1u32; } -pub struct AmdExtension315Fn {} -unsafe impl Send for AmdExtension315Fn {} -unsafe impl Sync for AmdExtension315Fn {} -impl ::std::clone::Clone for AmdExtension315Fn { +#[allow(non_camel_case_types)] +pub type PFN_vkCmdSetEvent2KHR = extern "system" fn( + command_buffer: CommandBuffer, + event: Event, + p_dependency_info: *const DependencyInfoKHR, +); +#[allow(non_camel_case_types)] +pub type PFN_vkCmdResetEvent2KHR = extern "system" fn( + command_buffer: CommandBuffer, + event: Event, + stage_mask: PipelineStageFlags2KHR, +); +#[allow(non_camel_case_types)] +pub type PFN_vkCmdWaitEvents2KHR = extern "system" fn( + command_buffer: CommandBuffer, + event_count: u32, + p_events: *const Event, + p_dependency_infos: *const DependencyInfoKHR, +); +#[allow(non_camel_case_types)] +pub type PFN_vkCmdPipelineBarrier2KHR = + extern "system" fn(command_buffer: CommandBuffer, p_dependency_info: *const DependencyInfoKHR); +#[allow(non_camel_case_types)] +pub type PFN_vkCmdWriteTimestamp2KHR = extern "system" fn( + command_buffer: 
CommandBuffer, + stage: PipelineStageFlags2KHR, + query_pool: QueryPool, + query: u32, +); +#[allow(non_camel_case_types)] +pub type PFN_vkQueueSubmit2KHR = extern "system" fn( + queue: Queue, + submit_count: u32, + p_submits: *const SubmitInfo2KHR, + fence: Fence, +) -> Result; +#[allow(non_camel_case_types)] +pub type PFN_vkCmdWriteBufferMarker2AMD = extern "system" fn( + command_buffer: CommandBuffer, + stage: PipelineStageFlags2KHR, + dst_buffer: Buffer, + dst_offset: DeviceSize, + marker: u32, +); +#[allow(non_camel_case_types)] +pub type PFN_vkGetQueueCheckpointData2NV = extern "system" fn( + queue: Queue, + p_checkpoint_data_count: *mut u32, + p_checkpoint_data: *mut CheckpointData2NV, +); +pub struct KhrSynchronization2Fn { + pub cmd_set_event2_khr: extern "system" fn( + command_buffer: CommandBuffer, + event: Event, + p_dependency_info: *const DependencyInfoKHR, + ), + pub cmd_reset_event2_khr: extern "system" fn( + command_buffer: CommandBuffer, + event: Event, + stage_mask: PipelineStageFlags2KHR, + ), + pub cmd_wait_events2_khr: extern "system" fn( + command_buffer: CommandBuffer, + event_count: u32, + p_events: *const Event, + p_dependency_infos: *const DependencyInfoKHR, + ), + pub cmd_pipeline_barrier2_khr: extern "system" fn( + command_buffer: CommandBuffer, + p_dependency_info: *const DependencyInfoKHR, + ), + pub cmd_write_timestamp2_khr: extern "system" fn( + command_buffer: CommandBuffer, + stage: PipelineStageFlags2KHR, + query_pool: QueryPool, + query: u32, + ), + pub queue_submit2_khr: extern "system" fn( + queue: Queue, + submit_count: u32, + p_submits: *const SubmitInfo2KHR, + fence: Fence, + ) -> Result, + pub cmd_write_buffer_marker2_amd: extern "system" fn( + command_buffer: CommandBuffer, + stage: PipelineStageFlags2KHR, + dst_buffer: Buffer, + dst_offset: DeviceSize, + marker: u32, + ), + pub get_queue_checkpoint_data2_nv: extern "system" fn( + queue: Queue, + p_checkpoint_data_count: *mut u32, + p_checkpoint_data: *mut 
CheckpointData2NV, + ), +} +unsafe impl Send for KhrSynchronization2Fn {} +unsafe impl Sync for KhrSynchronization2Fn {} +impl ::std::clone::Clone for KhrSynchronization2Fn { fn clone(&self) -> Self { - AmdExtension315Fn {} + KhrSynchronization2Fn { + cmd_set_event2_khr: self.cmd_set_event2_khr, + cmd_reset_event2_khr: self.cmd_reset_event2_khr, + cmd_wait_events2_khr: self.cmd_wait_events2_khr, + cmd_pipeline_barrier2_khr: self.cmd_pipeline_barrier2_khr, + cmd_write_timestamp2_khr: self.cmd_write_timestamp2_khr, + queue_submit2_khr: self.queue_submit2_khr, + cmd_write_buffer_marker2_amd: self.cmd_write_buffer_marker2_amd, + get_queue_checkpoint_data2_nv: self.get_queue_checkpoint_data2_nv, + } } } -impl AmdExtension315Fn { +impl KhrSynchronization2Fn { pub fn load(mut _f: F) -> Self where F: FnMut(&::std::ffi::CStr) -> *const c_void, { - AmdExtension315Fn {} + KhrSynchronization2Fn { + cmd_set_event2_khr: unsafe { + extern "system" fn cmd_set_event2_khr( + _command_buffer: CommandBuffer, + _event: Event, + _p_dependency_info: *const DependencyInfoKHR, + ) { + panic!(concat!("Unable to load ", stringify!(cmd_set_event2_khr))) + } + let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent2KHR\0"); + let val = _f(cname); + if val.is_null() { + cmd_set_event2_khr + } else { + ::std::mem::transmute(val) + } + }, + cmd_reset_event2_khr: unsafe { + extern "system" fn cmd_reset_event2_khr( + _command_buffer: CommandBuffer, + _event: Event, + _stage_mask: PipelineStageFlags2KHR, + ) { + panic!(concat!("Unable to load ", stringify!(cmd_reset_event2_khr))) + } + let cname = + ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent2KHR\0"); + let val = _f(cname); + if val.is_null() { + cmd_reset_event2_khr + } else { + ::std::mem::transmute(val) + } + }, + cmd_wait_events2_khr: unsafe { + extern "system" fn cmd_wait_events2_khr( + _command_buffer: CommandBuffer, + _event_count: u32, + _p_events: *const Event, + _p_dependency_infos: *const 
DependencyInfoKHR, + ) { + panic!(concat!("Unable to load ", stringify!(cmd_wait_events2_khr))) + } + let cname = + ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents2KHR\0"); + let val = _f(cname); + if val.is_null() { + cmd_wait_events2_khr + } else { + ::std::mem::transmute(val) + } + }, + cmd_pipeline_barrier2_khr: unsafe { + extern "system" fn cmd_pipeline_barrier2_khr( + _command_buffer: CommandBuffer, + _p_dependency_info: *const DependencyInfoKHR, + ) { + panic!(concat!( + "Unable to load ", + stringify!(cmd_pipeline_barrier2_khr) + )) + } + let cname = + ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier2KHR\0"); + let val = _f(cname); + if val.is_null() { + cmd_pipeline_barrier2_khr + } else { + ::std::mem::transmute(val) + } + }, + cmd_write_timestamp2_khr: unsafe { + extern "system" fn cmd_write_timestamp2_khr( + _command_buffer: CommandBuffer, + _stage: PipelineStageFlags2KHR, + _query_pool: QueryPool, + _query: u32, + ) { + panic!(concat!( + "Unable to load ", + stringify!(cmd_write_timestamp2_khr) + )) + } + let cname = + ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp2KHR\0"); + let val = _f(cname); + if val.is_null() { + cmd_write_timestamp2_khr + } else { + ::std::mem::transmute(val) + } + }, + queue_submit2_khr: unsafe { + extern "system" fn queue_submit2_khr( + _queue: Queue, + _submit_count: u32, + _p_submits: *const SubmitInfo2KHR, + _fence: Fence, + ) -> Result { + panic!(concat!("Unable to load ", stringify!(queue_submit2_khr))) + } + let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit2KHR\0"); + let val = _f(cname); + if val.is_null() { + queue_submit2_khr + } else { + ::std::mem::transmute(val) + } + }, + cmd_write_buffer_marker2_amd: unsafe { + extern "system" fn cmd_write_buffer_marker2_amd( + _command_buffer: CommandBuffer, + _stage: PipelineStageFlags2KHR, + _dst_buffer: Buffer, + _dst_offset: DeviceSize, + _marker: u32, + ) { + panic!(concat!( + "Unable 
to load ", + stringify!(cmd_write_buffer_marker2_amd) + )) + } + let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked( + b"vkCmdWriteBufferMarker2AMD\0", + ); + let val = _f(cname); + if val.is_null() { + cmd_write_buffer_marker2_amd + } else { + ::std::mem::transmute(val) + } + }, + get_queue_checkpoint_data2_nv: unsafe { + extern "system" fn get_queue_checkpoint_data2_nv( + _queue: Queue, + _p_checkpoint_data_count: *mut u32, + _p_checkpoint_data: *mut CheckpointData2NV, + ) { + panic!(concat!( + "Unable to load ", + stringify!(get_queue_checkpoint_data2_nv) + )) + } + let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked( + b"vkGetQueueCheckpointData2NV\0", + ); + let val = _f(cname); + if val.is_null() { + get_queue_checkpoint_data2_nv + } else { + ::std::mem::transmute(val) + } + }, + } } + #[doc = ""] + pub unsafe fn cmd_set_event2_khr( + &self, + command_buffer: CommandBuffer, + event: Event, + p_dependency_info: *const DependencyInfoKHR, + ) { + (self.cmd_set_event2_khr)(command_buffer, event, p_dependency_info) + } + #[doc = ""] + pub unsafe fn cmd_reset_event2_khr( + &self, + command_buffer: CommandBuffer, + event: Event, + stage_mask: PipelineStageFlags2KHR, + ) { + (self.cmd_reset_event2_khr)(command_buffer, event, stage_mask) + } + #[doc = ""] + pub unsafe fn cmd_wait_events2_khr( + &self, + command_buffer: CommandBuffer, + event_count: u32, + p_events: *const Event, + p_dependency_infos: *const DependencyInfoKHR, + ) { + (self.cmd_wait_events2_khr)(command_buffer, event_count, p_events, p_dependency_infos) + } + #[doc = ""] + pub unsafe fn cmd_pipeline_barrier2_khr( + &self, + command_buffer: CommandBuffer, + p_dependency_info: *const DependencyInfoKHR, + ) { + (self.cmd_pipeline_barrier2_khr)(command_buffer, p_dependency_info) + } + #[doc = ""] + pub unsafe fn cmd_write_timestamp2_khr( + &self, + command_buffer: CommandBuffer, + stage: PipelineStageFlags2KHR, + query_pool: QueryPool, + query: u32, + ) { + 
(self.cmd_write_timestamp2_khr)(command_buffer, stage, query_pool, query) + } + #[doc = ""] + pub unsafe fn queue_submit2_khr( + &self, + queue: Queue, + submit_count: u32, + p_submits: *const SubmitInfo2KHR, + fence: Fence, + ) -> Result { + (self.queue_submit2_khr)(queue, submit_count, p_submits, fence) + } + #[doc = ""] + pub unsafe fn cmd_write_buffer_marker2_amd( + &self, + command_buffer: CommandBuffer, + stage: PipelineStageFlags2KHR, + dst_buffer: Buffer, + dst_offset: DeviceSize, + marker: u32, + ) { + (self.cmd_write_buffer_marker2_amd)(command_buffer, stage, dst_buffer, dst_offset, marker) + } + #[doc = ""] + pub unsafe fn get_queue_checkpoint_data2_nv( + &self, + queue: Queue, + p_checkpoint_data_count: *mut u32, + p_checkpoint_data: *mut CheckpointData2NV, + ) { + (self.get_queue_checkpoint_data2_nv)(queue, p_checkpoint_data_count, p_checkpoint_data) + } +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const MEMORY_BARRIER_2_KHR: Self = Self(1_000_314_000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const BUFFER_MEMORY_BARRIER_2_KHR: Self = Self(1_000_314_001); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const IMAGE_MEMORY_BARRIER_2_KHR: Self = Self(1_000_314_002); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const DEPENDENCY_INFO_KHR: Self = Self(1_000_314_003); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const SUBMIT_INFO_2_KHR: Self = Self(1_000_314_004); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const SEMAPHORE_SUBMIT_INFO_KHR: Self = Self(1_000_314_005); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const COMMAND_BUFFER_SUBMIT_INFO_KHR: Self = Self(1_000_314_006); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const 
PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR: Self = Self(1_000_314_007); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl EventCreateFlags { + pub const DEVICE_ONLY_KHR: Self = Self(0b1); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl ImageLayout { + pub const READ_ONLY_OPTIMAL_KHR: Self = Self(1_000_314_000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl ImageLayout { + pub const ATTACHMENT_OPTIMAL_KHR: Self = Self(1_000_314_001); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags { + pub const NONE_KHR: Self = Self(0); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags { + pub const NONE_KHR: Self = Self(0); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const TRANSFORM_FEEDBACK_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const TRANSFORM_FEEDBACK_WRITE_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const TRANSFORM_FEEDBACK_COUNTER_READ_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT: Self = + Self(0b1000_0000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b100_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const CONDITIONAL_RENDERING_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const COMMAND_PREPROCESS_NV: Self = Self(0b10_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + 
pub const COMMAND_PREPROCESS_READ_NV: Self = Self(0b10_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const COMMAND_PREPROCESS_WRITE_NV: Self = Self(0b100_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const FRAGMENT_SHADING_RATE_ATTACHMENT: Self = Self(0b100_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const FRAGMENT_SHADING_RATE_ATTACHMENT_READ: Self = Self(0b1000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT; +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const SHADING_RATE_IMAGE_READ_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_READ; +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const ACCELERATION_STRUCTURE_BUILD: Self = Self(0b10_0000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const ACCELERATION_STRUCTURE_READ: Self = Self(0b10_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const ACCELERATION_STRUCTURE_WRITE: Self = Self(0b100_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const RAY_TRACING_SHADER: Self = Self(0b10_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const RAY_TRACING_SHADER_NV: Self = Self::RAY_TRACING_SHADER; +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const ACCELERATION_STRUCTURE_BUILD_NV: Self = Self::ACCELERATION_STRUCTURE_BUILD; +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl 
AccessFlags2KHR { + pub const ACCELERATION_STRUCTURE_READ_NV: Self = Self::ACCELERATION_STRUCTURE_READ; +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const ACCELERATION_STRUCTURE_WRITE_NV: Self = Self::ACCELERATION_STRUCTURE_WRITE; +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const FRAGMENT_DENSITY_PROCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const FRAGMENT_DENSITY_MAP_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl AccessFlags2KHR { + pub const COLOR_ATTACHMENT_READ_NONCOHERENT_EXT: Self = Self(0b1000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const TASK_SHADER_NV: Self = Self(0b1000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl PipelineStageFlags2KHR { + pub const MESH_SHADER_NV: Self = Self(0b1_0000_0000_0000_0000_0000); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: Self = Self(1_000_314_008); +} +#[doc = "Generated from 'VK_KHR_synchronization2'"] +impl StructureType { + pub const CHECKPOINT_DATA_2_NV: Self = Self(1_000_314_009); } impl AmdExtension316Fn { pub fn name() -> &'static ::std::ffi::CStr { @@ -23646,6 +24144,10 @@ impl BuildAccelerationStructureFlagsKHR { impl AccelerationStructureCreateFlagsKHR { pub const RESERVED_2_NV: Self = Self(0b100); } +#[doc = "Generated from 'VK_NV_extension_328'"] +impl PipelineCreateFlags { + pub const RESERVED_20_NV: Self = Self(0b1_0000_0000_0000_0000_0000); +} impl NvExtension329Fn { pub fn name() -> &'static ::std::ffi::CStr { ::std::ffi::CStr::from_bytes_with_nul(b"VK_NV_extension_329\0") diff --git a/generator/Cargo.toml b/generator/Cargo.toml index f19295a..1a4e332 
100644 --- a/generator/Cargo.toml +++ b/generator/Cargo.toml @@ -5,13 +5,15 @@ authors = ["Maik Klein "] edition = "2018" [dependencies] -vk-parse = { version = "0.5.0", features = ["vkxml-convert"] } -vkxml = "0.3" -nom = "6.0" heck = "0.3" -proc-macro2 = "1.0" itertools = "0.10" +nom = "6.0" +once_cell = "1.7" +proc-macro2 = "1.0" quote = "1.0" +regex = "1.4" +vk-parse = { git = "https://github.com/krolli/vk-parse", rev = "a133cbb330deeb5c8cea63574cb309075e43a231", features = ["vkxml-convert"] } +vkxml = "0.3" [dependencies.syn] version = "1.0" diff --git a/generator/Vulkan-Headers b/generator/Vulkan-Headers index 9166a06..1d99b83 160000 --- a/generator/Vulkan-Headers +++ b/generator/Vulkan-Headers @@ -1 +1 @@ -Subproject commit 9166a0677e4412edbdcc774bdcd7c74cefc74ae5 +Subproject commit 1d99b835ec3cd5a7fb2f2a2dd9a615ee2d1f0101 diff --git a/generator/src/lib.rs b/generator/src/lib.rs index 0b96eb0..26d068a 100644 --- a/generator/src/lib.rs +++ b/generator/src/lib.rs @@ -1,11 +1,12 @@ #![recursion_limit = "256"] -use nom::{alt, char, do_parse, map, named, opt, tag, take_while1, terminated}; -use quote::*; - use heck::{CamelCase, ShoutySnakeCase, SnakeCase}; use itertools::Itertools; +use nom::{alt, char, do_parse, map, named, opt, tag, take_while1, terminated}; +use once_cell::sync::Lazy; use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree}; +use quote::*; +use regex::Regex; use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt::Display; use std::hash::BuildHasher; @@ -477,7 +478,7 @@ impl quote::ToTokens for Constant { tokens.extend(rexpr.parse::()); } Constant::BitPos(pos) => { - let value = 1 << pos; + let value = 1u64 << pos; let bit_string = format!("{:b}", value); let bit_string = interleave_number('_', 4, &bit_string); syn::LitInt::new(&format!("0b{}", bit_string), Span::call_site()).to_tokens(tokens); @@ -517,7 +518,7 @@ impl Constant { match *self { Constant::Number(n) => Some(ConstVal::U64(n as u64)), Constant::Hex(ref 
hex) => u64::from_str_radix(&hex, 16).ok().map(ConstVal::U64), - Constant::BitPos(pos) => Some(ConstVal::U64((1 << pos) as u64)), + Constant::BitPos(pos) => Some(ConstVal::U64(1u64 << pos)), _ => None, } } @@ -1090,7 +1091,7 @@ pub fn generate_extension_constants<'a>( extension_number: i64, extension_items: &'a [vk_parse::ExtensionChild], const_cache: &mut HashSet<&'a str, impl BuildHasher>, - const_values: &mut BTreeMap>, + const_values: &mut BTreeMap, ) -> TokenStream { let items = extension_items .iter() @@ -1116,6 +1117,7 @@ pub fn generate_extension_constants<'a>( const_values .get_mut(&ident) .unwrap() + .values .push(ConstantMatchInfo { ident: ext_constant.variant_ident(&extends), is_alias, @@ -1214,7 +1216,7 @@ pub fn generate_extension<'a>( extension: &'a vk_parse::Extension, cmd_map: &CommandMap<'a>, const_cache: &mut HashSet<&'a str, impl BuildHasher>, - const_values: &mut BTreeMap>, + const_values: &mut BTreeMap, cmd_aliases: &HashMap, fn_cache: &mut HashSet<&'a str, impl BuildHasher>, ) -> Option { @@ -1285,7 +1287,7 @@ pub fn generate_typedef(typedef: &vkxml::Typedef) -> TokenStream { pub fn generate_bitmask( bitmask: &vkxml::Bitmask, bitflags_cache: &mut HashSet, - const_values: &mut BTreeMap>, + const_values: &mut BTreeMap, ) -> Option { // Workaround for empty bitmask if bitmask.name.is_empty() { @@ -1302,14 +1304,15 @@ pub fn generate_bitmask( return None; }; bitflags_cache.insert(ident.clone()); - const_values.insert(ident.clone(), Vec::new()); + const_values.insert(ident.clone(), Default::default()); let khronos_link = khronos_link(&bitmask.name); + let type_ = name_to_tokens(&bitmask.basetype); Some(quote! 
{ #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = #khronos_link] - pub struct #ident(pub(crate) Flags); - vk_bitflags_wrapped!(#ident, 0b0, Flags); + pub struct #ident(pub(crate) #type_); + vk_bitflags_wrapped!(#ident, 0b0, #type_); }) } @@ -1331,6 +1334,8 @@ fn is_enum_variant_with_typo(variant_name: &str) -> bool { ) } +static TRAILING_NUMBER: Lazy = Lazy::new(|| Regex::new("(\\d+)$").unwrap()); + pub fn variant_ident(enum_name: &str, variant_name: &str) -> Ident { let variant_name = variant_name.to_uppercase(); let _name = enum_name.replace("FlagBits", ""); @@ -1347,20 +1352,23 @@ pub fn variant_ident(enum_name: &str, variant_name: &str) -> Ident { .cloned() .unwrap_or(""); let struct_name = struct_name.strip_suffix(vendor).unwrap(); + let struct_name = TRAILING_NUMBER.replace(struct_name, "_$1"); let variant_name = variant_name .strip_suffix(vendor) .unwrap_or_else(|| variant_name.as_str()); - let new_variant_name = variant_name.strip_prefix(struct_name).unwrap_or_else(|| { - if enum_name == "VkResult" || is_enum_variant_with_typo(variant_name) { - variant_name.strip_prefix("VK").unwrap() - } else { - panic!( - "Failed to strip {} prefix from enum variant {}", - struct_name, variant_name - ) - } - }); + let new_variant_name = variant_name + .strip_prefix(struct_name.as_ref()) + .unwrap_or_else(|| { + if enum_name == "VkResult" || is_enum_variant_with_typo(variant_name) { + variant_name.strip_prefix("VK").unwrap() + } else { + panic!( + "Failed to strip {} prefix from enum variant {}", + struct_name, variant_name + ) + } + }); // Both of the above strip_prefix leave a leading `_`: let new_variant_name = new_variant_name.strip_prefix("_").unwrap(); @@ -1428,7 +1436,7 @@ pub fn bitflags_impl_block( pub fn generate_enum<'a>( enum_: &'a vk_parse::Enums, const_cache: &mut HashSet<&'a str, impl BuildHasher>, - const_values: &mut BTreeMap>, + const_values: &mut BTreeMap, bitflags_cache: &mut HashSet, ) -> EnumType { let name = 
enum_.name.as_ref().unwrap(); @@ -1463,7 +1471,13 @@ pub fn generate_enum<'a>( is_alias: constant.is_alias(), }); } - const_values.insert(ident.clone(), values); + const_values.insert( + ident.clone(), + ConstantTypeInfo { + values, + bitwidth: enum_.bitwidth, + }, + ); let khronos_link = khronos_link(name); @@ -1473,6 +1487,12 @@ pub fn generate_enum<'a>( .iter() .filter_map(|constant| constant.constant(name).value()) .fold(0, |acc, next| acc | next.bits()); + + let type_ = if enum_.bitwidth == Some(64u32) { + quote!(Flags64) + } else { + quote!(Flags) + }; let bit_string = format!("{:b}", all_bits); let bit_string = interleave_number('_', 4, &bit_string); let all_bits_term = syn::LitInt::new(&format!("0b{}", bit_string), Span::call_site()); @@ -1486,8 +1506,8 @@ pub fn generate_enum<'a>( #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = #khronos_link] - pub struct #ident(pub(crate) Flags); - vk_bitflags_wrapped!(#ident, #all_bits_term, Flags); + pub struct #ident(pub(crate) #type_); + vk_bitflags_wrapped!(#ident, #all_bits_term, #type_); #impl_bitflags }; EnumType::Bitflags(q) @@ -2204,7 +2224,7 @@ pub fn generate_definition( union_types: &HashSet<&str, impl BuildHasher>, root_structs: &HashSet, bitflags_cache: &mut HashSet, - const_values: &mut BTreeMap>, + const_values: &mut BTreeMap, ) -> Option { match *definition { vkxml::DefinitionsElement::Define(ref define) => Some(generate_define(define)), @@ -2329,7 +2349,7 @@ pub fn generate_constant<'a>( pub fn generate_feature_extension<'a>( registry: &'a vk_parse::Registry, const_cache: &mut HashSet<&'a str, impl BuildHasher>, - const_values: &mut BTreeMap>, + const_values: &mut BTreeMap, ) -> TokenStream { let constants = registry.0.iter().filter_map(|item| match item { vk_parse::RegistryChild::Feature(feature) => Some(generate_extension_constants( @@ -2351,10 +2371,15 @@ pub struct ConstantMatchInfo { pub is_alias: bool, } -pub fn generate_const_debugs( - const_values: 
&BTreeMap>, -) -> TokenStream { +#[derive(Default)] +pub struct ConstantTypeInfo { + values: Vec, + bitwidth: Option, +} + +pub fn generate_const_debugs(const_values: &BTreeMap) -> TokenStream { let impls = const_values.iter().map(|(ty, values)| { + let ConstantTypeInfo { values, bitwidth } = values; if ty.to_string().contains("Flags") { let cases = values.iter().filter_map(|value| { if value.is_alias { @@ -2365,10 +2390,17 @@ pub fn generate_const_debugs( Some(quote! { (#ty::#ident.0, #name) }) } }); + + let type_ = if bitwidth == &Some(64u32) { + quote!(Flags64) + } else { + quote!(Flags) + }; + quote! { impl fmt::Debug for #ty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - const KNOWN: &[(Flags, &str)] = &[#(#cases),*]; + const KNOWN: &[(#type_, &str)] = &[#(#cases),*]; debug_flags(f, KNOWN, self.0) } } @@ -2401,19 +2433,28 @@ pub fn generate_const_debugs( } }); quote! { - pub(crate) fn debug_flags(f: &mut fmt::Formatter, known: &[(Flags, &'static str)], value: Flags) -> fmt::Result { + pub(crate) fn debug_flags + Copy>( + f: &mut fmt::Formatter, + known: &[(Value, &'static str)], + value: Value, + ) -> fmt::Result { let mut first = true; - let mut accum = value; - for (bit, name) in known { - if *bit != 0 && accum & *bit == *bit { - if !first { f.write_str(" | ")?; } + let mut accum = value.into(); + for &(bit, name) in known { + let bit = bit.into(); + if bit != 0 && accum & bit == bit { + if !first { + f.write_str(" | ")?; + } f.write_str(name)?; first = false; accum &= !bit; } } if accum != 0 { - if !first { f.write_str(" | ")?; } + if !first { + f.write_str(" | ")?; + } write!(f, "{:b}", accum)?; } Ok(()) @@ -2537,7 +2578,7 @@ pub fn write_source_code>(vk_xml: &Path, src_dir: P) { let mut bitflags_cache = HashSet::new(); let mut const_cache = HashSet::new(); - let mut const_values: BTreeMap> = BTreeMap::new(); + let mut const_values: BTreeMap = BTreeMap::new(); let (enum_code, bitflags_code) = spec2 .0