diff --git a/Changelog.md b/Changelog.md
index 14de353..de5615c 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ### Added
 
+- Add helper wrappers for Vulkan core 1.3 `Instance` and `Device` functions (#568)
 - Update Vulkan-Headers to 1.3.206 (#563)
 
 ## [0.35.2] - 2022-02-19
diff --git a/ash/src/device.rs b/ash/src/device.rs
index b379fdc..1009cc4 100644
--- a/ash/src/device.rs
+++ b/ash/src/device.rs
@@ -14,6 +14,7 @@ pub struct Device {
     pub(crate) device_fn_1_0: vk::DeviceFnV1_0,
     pub(crate) device_fn_1_1: vk::DeviceFnV1_1,
     pub(crate) device_fn_1_2: vk::DeviceFnV1_2,
+    pub(crate) device_fn_1_3: vk::DeviceFnV1_3,
 }
 
 impl Device {
@@ -28,6 +29,7 @@ impl Device {
             device_fn_1_0: vk::DeviceFnV1_0::load(load_fn),
             device_fn_1_1: vk::DeviceFnV1_1::load(load_fn),
             device_fn_1_2: vk::DeviceFnV1_2::load(load_fn),
+            device_fn_1_3: vk::DeviceFnV1_3::load(load_fn),
         }
     }
 
@@ -36,6 +38,464 @@ impl Device {
     }
 }
 
+/// Vulkan core 1.3
+#[allow(non_camel_case_types)]
+impl Device {
+    pub fn fp_v1_3(&self) -> &vk::DeviceFnV1_3 {
+        &self.device_fn_1_3
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreatePrivateDataSlot.html>
+    pub unsafe fn create_private_data_slot(
+        &self,
+        create_info: &vk::PrivateDataSlotCreateInfo,
+        allocation_callbacks: Option<&vk::AllocationCallbacks>,
+    ) -> VkResult<vk::PrivateDataSlot> {
+        let mut private_data_slot = mem::zeroed();
+        self.fp_v1_3()
+            .create_private_data_slot(
+                self.handle,
+                create_info,
+                allocation_callbacks.as_raw_ptr(),
+                &mut private_data_slot,
+            )
+            .result_with_success(private_data_slot)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPrivateDataSlot.html>
+    pub unsafe fn destroy_private_data_slot(
+        &self,
+        private_data_slot: vk::PrivateDataSlot,
+        allocation_callbacks: Option<&vk::AllocationCallbacks>,
+    ) {
+        self.fp_v1_3().destroy_private_data_slot(
+            self.handle,
+            private_data_slot,
+            allocation_callbacks.as_raw_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetPrivateData.html>
+    pub unsafe fn set_private_data<T: Handle>(
+        &self,
+        object: T,
+        private_data_slot: vk::PrivateDataSlot,
+        data: u64,
+    ) -> VkResult<()> {
+        self.fp_v1_3()
+            .set_private_data(
+                self.handle,
+                T::TYPE,
+                object.as_raw(),
+                private_data_slot,
+                data,
+            )
+            .result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPrivateData.html>
+    pub unsafe fn get_private_data<T: Handle>(
+        &self,
+        object: T,
+        private_data_slot: vk::PrivateDataSlot,
+    ) -> u64 {
+        let mut data = mem::zeroed();
+        self.fp_v1_3().get_private_data(
+            self.handle,
+            T::TYPE,
+            object.as_raw(),
+            private_data_slot,
+            &mut data,
+        );
+        data
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier2.html>
+    pub unsafe fn cmd_pipeline_barrier2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        dependency_info: &vk::DependencyInfo,
+    ) {
+        self.fp_v1_3()
+            .cmd_pipeline_barrier2(command_buffer, dependency_info)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent2.html>
+    pub unsafe fn cmd_reset_event2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        event: vk::Event,
+        stage_mask: vk::PipelineStageFlags2,
+    ) {
+        self.fp_v1_3()
+            .cmd_reset_event2(command_buffer, event, stage_mask)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent2.html>
+    pub unsafe fn cmd_set_event2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        event: vk::Event,
+        dependency_info: &vk::DependencyInfo,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_event2(command_buffer, event, dependency_info)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents2.html>
+    pub unsafe fn cmd_wait_events2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        events: &[vk::Event],
+        dependency_infos: &[vk::DependencyInfo],
+    ) {
+        assert_eq!(events.len(), dependency_infos.len());
+        self.fp_v1_3().cmd_wait_events2(
+            command_buffer,
+            events.len() as u32,
+            events.as_ptr(),
+            dependency_infos.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp2.html>
+    pub unsafe fn cmd_write_timestamp2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        stage: vk::PipelineStageFlags2,
+        query_pool: vk::QueryPool,
+        query: u32,
+    ) {
+        self.fp_v1_3()
+            .cmd_write_timestamp2(command_buffer, stage, query_pool, query)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit2.html>
+    pub unsafe fn queue_submit2(
+        &self,
+        queue: vk::Queue,
+        submits: &[vk::SubmitInfo2],
+        fence: vk::Fence,
+    ) -> VkResult<()> {
+        self.fp_v1_3()
+            .queue_submit2(queue, submits.len() as u32, submits.as_ptr(), fence)
+            .result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBuffer2.html>
+    pub unsafe fn cmd_copy_buffer2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        copy_buffer_info: &vk::CopyBufferInfo2,
+    ) {
+        self.fp_v1_3()
+            .cmd_copy_buffer2(command_buffer, copy_buffer_info)
+    }
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImage2.html>
+    pub unsafe fn cmd_copy_image2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        copy_image_info: &vk::CopyImageInfo2,
+    ) {
+        self.fp_v1_3()
+            .cmd_copy_image2(command_buffer, copy_image_info)
+    }
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBufferToImage2.html>
+    pub unsafe fn cmd_copy_buffer_to_image2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2,
+    ) {
+        self.fp_v1_3()
+            .cmd_copy_buffer_to_image2(command_buffer, copy_buffer_to_image_info)
+    }
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImageToBuffer2.html>
+    pub unsafe fn cmd_copy_image_to_buffer2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2,
+    ) {
+        self.fp_v1_3()
+            .cmd_copy_image_to_buffer2(command_buffer, copy_image_to_buffer_info)
+    }
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBlitImage2.html>
+    pub unsafe fn cmd_blit_image2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        blit_image_info: &vk::BlitImageInfo2,
+    ) {
+        self.fp_v1_3()
+            .cmd_blit_image2(command_buffer, blit_image_info)
+    }
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResolveImage2.html>
+    pub unsafe fn cmd_resolve_image2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        resolve_image_info: &vk::ResolveImageInfo2,
+    ) {
+        self.fp_v1_3()
+            .cmd_resolve_image2(command_buffer, resolve_image_info)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRendering.html>
+    pub unsafe fn cmd_begin_rendering(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        rendering_info: &vk::RenderingInfo,
+    ) {
+        self.fp_v1_3()
+            .cmd_begin_rendering(command_buffer, rendering_info)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRendering.html>
+    pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
+        self.fp_v1_3().cmd_end_rendering(command_buffer)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCullMode.html>
+    pub unsafe fn cmd_set_cull_mode(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        cull_mode: vk::CullModeFlags,
+    ) {
+        self.fp_v1_3().cmd_set_cull_mode(command_buffer, cull_mode)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetFrontFace.html>
+    pub unsafe fn cmd_set_front_face(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        front_face: vk::FrontFace,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_front_face(command_buffer, front_face)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveTopology.html>
+    pub unsafe fn cmd_set_primitive_topology(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        primitive_topology: vk::PrimitiveTopology,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_primitive_topology(command_buffer, primitive_topology)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewportWithCount.html>
+    pub unsafe fn cmd_set_viewport_with_count(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        viewports: &[vk::Viewport],
+    ) {
+        self.fp_v1_3().cmd_set_viewport_with_count(
+            command_buffer,
+            viewports.len() as u32,
+            viewports.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissorWithCount.html>
+    pub unsafe fn cmd_set_scissor_with_count(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        scissors: &[vk::Rect2D],
+    ) {
+        self.fp_v1_3().cmd_set_scissor_with_count(
+            command_buffer,
+            scissors.len() as u32,
+            scissors.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers2.html>
+    pub unsafe fn cmd_bind_vertex_buffers2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_binding: u32,
+        buffers: &[vk::Buffer],
+        offsets: &[vk::DeviceSize],
+        sizes: Option<&[vk::DeviceSize]>,
+        strides: Option<&[vk::DeviceSize]>,
+    ) {
+        assert_eq!(offsets.len(), buffers.len());
+        let p_sizes = if let Some(sizes) = sizes {
+            assert_eq!(sizes.len(), buffers.len());
+            sizes.as_ptr()
+        } else {
+            ptr::null()
+        };
+        let p_strides = if let Some(strides) = strides {
+            assert_eq!(strides.len(), buffers.len());
+            strides.as_ptr()
+        } else {
+            ptr::null()
+        };
+        self.fp_v1_3().cmd_bind_vertex_buffers2(
+            command_buffer,
+            first_binding,
+            buffers.len() as u32,
+            buffers.as_ptr(),
+            offsets.as_ptr(),
+            p_sizes,
+            p_strides,
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthTestEnable.html>
+    pub unsafe fn cmd_set_depth_test_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_test_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_depth_test_enable(command_buffer, depth_test_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthWriteEnable.html>
+    pub unsafe fn cmd_set_depth_write_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_write_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_depth_write_enable(command_buffer, depth_write_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthCompareOp.html>
+    pub unsafe fn cmd_set_depth_compare_op(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_compare_op: vk::CompareOp,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_depth_compare_op(command_buffer, depth_compare_op)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBoundsTestEnable.html>
+    pub unsafe fn cmd_set_depth_bounds_test_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_bounds_test_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_depth_bounds_test_enable(command_buffer, depth_bounds_test_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilTestEnable.html>
+    pub unsafe fn cmd_set_stencil_test_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        stencil_test_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_stencil_test_enable(command_buffer, stencil_test_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilOp.html>
+    pub unsafe fn cmd_set_stencil_op(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        face_mask: vk::StencilFaceFlags,
+        fail_op: vk::StencilOp,
+        pass_op: vk::StencilOp,
+        depth_fail_op: vk::StencilOp,
+        compare_op: vk::CompareOp,
+    ) {
+        self.fp_v1_3().cmd_set_stencil_op(
+            command_buffer,
+            face_mask,
+            fail_op,
+            pass_op,
+            depth_fail_op,
+            compare_op,
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRasterizerDiscardEnable.html>
+    pub unsafe fn cmd_set_rasterizer_discard_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        rasterizer_discard_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_rasterizer_discard_enable(command_buffer, rasterizer_discard_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBiasEnable.html>
+    pub unsafe fn cmd_set_depth_bias_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_bias_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_depth_bias_enable(command_buffer, depth_bias_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveRestartEnable.html>
+    pub unsafe fn cmd_set_primitive_restart_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        primitive_restart_enable: bool,
+    ) {
+        self.fp_v1_3()
+            .cmd_set_primitive_restart_enable(command_buffer, primitive_restart_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceBufferMemoryRequirements.html>
+    pub unsafe fn get_device_buffer_memory_requirements(
+        &self,
+        create_info: &vk::DeviceBufferMemoryRequirements,
+        out: &mut vk::MemoryRequirements2,
+    ) {
+        self.fp_v1_3()
+            .get_device_buffer_memory_requirements(self.handle, create_info, out)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageMemoryRequirements.html>
+    pub unsafe fn get_device_image_memory_requirements(
+        &self,
+        create_info: &vk::DeviceImageMemoryRequirements,
+        out: &mut vk::MemoryRequirements2,
+    ) {
+        self.fp_v1_3()
+            .get_device_image_memory_requirements(self.handle, create_info, out)
+    }
+
+    /// Retrieve the number of elements to pass to [`get_device_image_sparse_memory_requirements()`][Self::get_device_image_sparse_memory_requirements()]
+    pub unsafe fn get_device_image_sparse_memory_requirements_len(
+        &self,
+        create_info: &vk::DeviceImageMemoryRequirements,
+    ) -> usize {
+        let mut count = 0;
+        self.fp_v1_3().get_device_image_sparse_memory_requirements(
+            self.handle,
+            create_info,
+            &mut count,
+            std::ptr::null_mut(),
+        );
+        count as usize
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageSparseMemoryRequirements.html>
+    ///
+    /// Call [`get_device_image_sparse_memory_requirements_len()`][Self::get_device_image_sparse_memory_requirements_len()] to query the number of elements to pass to `out`.
+    /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+    pub unsafe fn get_device_image_sparse_memory_requirements(
+        &self,
+        create_info: &vk::DeviceImageMemoryRequirements,
+        out: &mut [vk::SparseImageMemoryRequirements2],
+    ) {
+        let mut count = out.len() as u32;
+        self.fp_v1_3().get_device_image_sparse_memory_requirements(
+            self.handle,
+            create_info,
+            &mut count,
+            out.as_mut_ptr(),
+        );
+        assert_eq!(count as usize, out.len());
+    }
+}
+
 /// Vulkan core 1.2
 #[allow(non_camel_case_types)]
 impl Device {
diff --git a/ash/src/entry.rs b/ash/src/entry.rs
index f944a57..300ae81 100644
--- a/ash/src/entry.rs
+++ b/ash/src/entry.rs
@@ -22,6 +22,7 @@ pub struct Entry {
     entry_fn_1_0: vk::EntryFnV1_0,
    entry_fn_1_1: vk::EntryFnV1_1,
     entry_fn_1_2: vk::EntryFnV1_2,
+    entry_fn_1_3: vk::EntryFnV1_3,
     #[cfg(feature = "loaded")]
    _lib_guard: Option<Arc<libloading::Library>>,
 }
@@ -148,12 +149,14 @@ impl Entry {
         let entry_fn_1_0 = vk::EntryFnV1_0::load(load_fn);
         let entry_fn_1_1 = vk::EntryFnV1_1::load(load_fn);
         let entry_fn_1_2 = vk::EntryFnV1_2::load(load_fn);
+        let entry_fn_1_3 = vk::EntryFnV1_3::load(load_fn);
 
         Self {
             static_fn,
             entry_fn_1_0,
             entry_fn_1_1,
             entry_fn_1_2,
+            entry_fn_1_3,
             #[cfg(feature = "loaded")]
             _lib_guard: None,
         }
@@ -290,6 +293,14 @@ impl Entry {
     }
 }
 
+/// Vulkan core 1.3
+#[allow(non_camel_case_types)]
+impl Entry {
+    pub fn fp_v1_3(&self) -> &vk::EntryFnV1_3 {
+        &self.entry_fn_1_3
+    }
+}
+
 #[cfg(feature = "linked")]
 #[cfg_attr(docsrs, doc(cfg(feature = "linked")))]
 impl Default for Entry {
diff --git a/ash/src/instance.rs b/ash/src/instance.rs
index 53eee50..732b73f 100644
--- a/ash/src/instance.rs
+++ b/ash/src/instance.rs
@@ -14,6 +14,7 @@ pub struct Instance {
     pub(crate) instance_fn_1_0: vk::InstanceFnV1_0,
     pub(crate) instance_fn_1_1: vk::InstanceFnV1_1,
     pub(crate) instance_fn_1_2: vk::InstanceFnV1_2,
+    pub(crate) instance_fn_1_3: vk::InstanceFnV1_3,
 }
 
 impl Instance {
@@ -28,6 +29,7 @@ impl Instance {
             instance_fn_1_0: vk::InstanceFnV1_0::load(load_fn),
             instance_fn_1_1: vk::InstanceFnV1_1::load(load_fn),
             instance_fn_1_2: vk::InstanceFnV1_2::load(load_fn),
+            instance_fn_1_3: vk::InstanceFnV1_3::load(load_fn),
         }
     }
 
@@ -36,6 +38,42 @@ impl Instance {
     }
 }
 
+/// Vulkan core 1.3
+#[allow(non_camel_case_types)]
+impl Instance {
+    pub fn fp_v1_3(&self) -> &vk::InstanceFnV1_3 {
+        &self.instance_fn_1_3
+    }
+
+    /// Retrieve the number of elements to pass to [`get_physical_device_tool_properties()`][Self::get_physical_device_tool_properties()]
+    pub unsafe fn get_physical_device_tool_properties_len(
+        &self,
+        physical_device: vk::PhysicalDevice,
+    ) -> VkResult<usize> {
+        let mut count = 0;
+        self.instance_fn_1_3
+            .get_physical_device_tool_properties(physical_device, &mut count, ptr::null_mut())
+            .result_with_success(count as usize)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceToolProperties.html>
+    ///
+    /// Call [`get_physical_device_tool_properties_len()`][Self::get_physical_device_tool_properties_len()] to query the number of elements to pass to `out`.
+    /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+    pub unsafe fn get_physical_device_tool_properties(
+        &self,
+        physical_device: vk::PhysicalDevice,
+        out: &mut [vk::PhysicalDeviceToolProperties],
+    ) -> VkResult<()> {
+        let mut count = out.len() as u32;
+        self.instance_fn_1_3
+            .get_physical_device_tool_properties(physical_device, &mut count, out.as_mut_ptr())
+            .result()?;
+        assert_eq!(count as usize, out.len());
+        Ok(())
+    }
+}
+
 /// Vulkan core 1.2
 #[allow(non_camel_case_types)]
 impl Instance {