move unsafe

Alex Janka 2023-10-02 18:43:49 +11:00
parent fa501247ad
commit 520d2e148b
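
This commit moves the unsafe boundary outward: the Vulkan-facing methods on VulkanWindowInner (resize, new_frame, render) and the record_submit_commandbuffer helper become `unsafe fn` and lose the `unsafe { ... }` blocks that used to wrap their bodies, while the safe WindowData wrappers now spell out the `unsafe` block at the call site. A minimal sketch of the pattern, using hypothetical simplified types rather than the real ones from this diff:

// Before: a nominally safe method hiding an `unsafe` block in its body.
// After: the method itself is `unsafe`, so the contract shows up in its signature.
struct Inner;

impl Inner {
    unsafe fn render(&mut self) {
        // ... raw Vulkan calls, no inner `unsafe` block needed ...
    }
}

struct Wrapper {
    inner: Inner,
}

impl Wrapper {
    // The safe public wrapper now takes on the `unsafe` block at the call site.
    pub fn render(&mut self) {
        unsafe { self.inner.render() };
    }
}

Declaring the inner methods `unsafe fn` surfaces the caller obligation in the signature instead of burying it inside the body; the behaviour of the code itself is unchanged.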

@@ -52,15 +52,15 @@ impl WindowData {
     }

     pub fn resize(&mut self, width: u32, height: u32, factor: u32, window: &Window) {
-        self.inner.resize(width, height, factor, window);
+        unsafe { self.inner.resize(width, height, factor, window) };
     }

     pub fn new_frame(&mut self, buffer: &[[u8; 4]]) {
-        self.inner.new_frame(buffer);
+        unsafe { self.inner.new_frame(buffer) };
     }

     pub fn render(&mut self) {
-        self.inner.render();
+        unsafe { self.inner.render() };
     }

 }
@@ -838,184 +838,180 @@ impl VulkanWindowInner {
         }
     }

-    fn resize(&mut self, _width: u32, _height: u32, factor: u32, window: &Window) {
-        *self = unsafe { Self::new(factor, window) };
+    unsafe fn resize(&mut self, _width: u32, _height: u32, factor: u32, window: &Window) {
+        *self = Self::new(factor, window);
     }

-    fn new_frame(&mut self, buffer: &[[u8; 4]]) {
-        unsafe {
-            let image_ptr = self
-                .device
-                .map_memory(
-                    self.image_buffer_memory,
-                    0,
-                    self.image_buffer_memory_req.size,
-                    vk::MemoryMapFlags::empty(),
-                )
-                .unwrap();
-            let mut image_slice: Align<u8> = Align::new(
-                image_ptr,
-                std::mem::align_of::<u8>() as u64,
-                self.image_buffer_memory_req.size,
-            );
-            image_slice.copy_from_slice(bytemuck::cast_slice(buffer));
-            self.device.unmap_memory(self.image_buffer_memory);
-            self.device
-                .bind_buffer_memory(self.image_buffer, self.image_buffer_memory, 0)
-                .unwrap();
-            record_submit_commandbuffer(
-                &self.device,
-                self.setup_command_buffer,
-                self.setup_commands_reuse_fence,
-                self.present_queue,
-                &[],
-                &[],
-                &[],
-                |device, texture_command_buffer| {
-                    let texture_barrier = vk::ImageMemoryBarrier {
-                        dst_access_mask: vk::AccessFlags::TRANSFER_WRITE,
-                        new_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
-                        image: self.texture_image,
-                        subresource_range: vk::ImageSubresourceRange {
-                            aspect_mask: vk::ImageAspectFlags::COLOR,
-                            level_count: 1,
-                            layer_count: 1,
-                            ..Default::default()
-                        },
-                        ..Default::default()
-                    };
-                    device.cmd_pipeline_barrier(
-                        texture_command_buffer,
-                        vk::PipelineStageFlags::BOTTOM_OF_PIPE,
-                        vk::PipelineStageFlags::TRANSFER,
-                        vk::DependencyFlags::empty(),
-                        &[],
-                        &[],
-                        &[texture_barrier],
-                    );
-                    let buffer_copy_regions = vk::BufferImageCopy::builder()
-                        .image_subresource(
-                            vk::ImageSubresourceLayers::builder()
-                                .aspect_mask(vk::ImageAspectFlags::COLOR)
-                                .layer_count(1)
-                                .build(),
-                        )
-                        .image_extent(self.image_extent.into())
-                        .build();
-                    device.cmd_copy_buffer_to_image(
-                        texture_command_buffer,
-                        self.image_buffer,
-                        self.texture_image,
-                        vk::ImageLayout::TRANSFER_DST_OPTIMAL,
-                        &[buffer_copy_regions],
-                    );
-                    let texture_barrier_end = vk::ImageMemoryBarrier {
-                        src_access_mask: vk::AccessFlags::TRANSFER_WRITE,
-                        dst_access_mask: vk::AccessFlags::SHADER_READ,
-                        old_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
-                        new_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
-                        image: self.texture_image,
-                        subresource_range: vk::ImageSubresourceRange {
-                            aspect_mask: vk::ImageAspectFlags::COLOR,
-                            level_count: 1,
-                            layer_count: 1,
-                            ..Default::default()
-                        },
-                        ..Default::default()
-                    };
-                    device.cmd_pipeline_barrier(
-                        texture_command_buffer,
-                        vk::PipelineStageFlags::TRANSFER,
-                        vk::PipelineStageFlags::FRAGMENT_SHADER,
-                        vk::DependencyFlags::empty(),
-                        &[],
-                        &[],
-                        &[texture_barrier_end],
-                    );
-                },
-            );
-        }
+    unsafe fn new_frame(&mut self, buffer: &[[u8; 4]]) {
+        let image_ptr = self
+            .device
+            .map_memory(
+                self.image_buffer_memory,
+                0,
+                self.image_buffer_memory_req.size,
+                vk::MemoryMapFlags::empty(),
+            )
+            .unwrap();
+        let mut image_slice: Align<u8> = Align::new(
+            image_ptr,
+            std::mem::align_of::<u8>() as u64,
+            self.image_buffer_memory_req.size,
+        );
+        image_slice.copy_from_slice(bytemuck::cast_slice(buffer));
+        self.device.unmap_memory(self.image_buffer_memory);
+        self.device
+            .bind_buffer_memory(self.image_buffer, self.image_buffer_memory, 0)
+            .unwrap();
+        record_submit_commandbuffer(
+            &self.device,
+            self.setup_command_buffer,
+            self.setup_commands_reuse_fence,
+            self.present_queue,
+            &[],
+            &[],
+            &[],
+            |device, texture_command_buffer| {
+                let texture_barrier = vk::ImageMemoryBarrier {
+                    dst_access_mask: vk::AccessFlags::TRANSFER_WRITE,
+                    new_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
+                    image: self.texture_image,
+                    subresource_range: vk::ImageSubresourceRange {
+                        aspect_mask: vk::ImageAspectFlags::COLOR,
+                        level_count: 1,
+                        layer_count: 1,
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                };
+                device.cmd_pipeline_barrier(
+                    texture_command_buffer,
+                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
+                    vk::PipelineStageFlags::TRANSFER,
+                    vk::DependencyFlags::empty(),
+                    &[],
+                    &[],
+                    &[texture_barrier],
+                );
+                let buffer_copy_regions = vk::BufferImageCopy::builder()
+                    .image_subresource(
+                        vk::ImageSubresourceLayers::builder()
+                            .aspect_mask(vk::ImageAspectFlags::COLOR)
+                            .layer_count(1)
+                            .build(),
+                    )
+                    .image_extent(self.image_extent.into())
+                    .build();
+                device.cmd_copy_buffer_to_image(
+                    texture_command_buffer,
+                    self.image_buffer,
+                    self.texture_image,
+                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
+                    &[buffer_copy_regions],
+                );
+                let texture_barrier_end = vk::ImageMemoryBarrier {
+                    src_access_mask: vk::AccessFlags::TRANSFER_WRITE,
+                    dst_access_mask: vk::AccessFlags::SHADER_READ,
+                    old_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
+                    new_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
+                    image: self.texture_image,
+                    subresource_range: vk::ImageSubresourceRange {
+                        aspect_mask: vk::ImageAspectFlags::COLOR,
+                        level_count: 1,
+                        layer_count: 1,
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                };
+                device.cmd_pipeline_barrier(
+                    texture_command_buffer,
+                    vk::PipelineStageFlags::TRANSFER,
+                    vk::PipelineStageFlags::FRAGMENT_SHADER,
+                    vk::DependencyFlags::empty(),
+                    &[],
+                    &[],
+                    &[texture_barrier_end],
+                );
+            },
+        );
     }

-    fn render(&mut self) {
-        unsafe {
-            let (present_index, _) = self
-                .swapchain_loader
-                .acquire_next_image(
-                    self.swapchain,
-                    std::u64::MAX,
-                    self.present_complete_semaphore,
-                    vk::Fence::null(),
-                )
-                .unwrap();
-            let clear_values = [vk::ClearValue {
-                color: vk::ClearColorValue {
-                    float32: [0.0, 0.0, 0.0, 0.0],
-                },
-            }];
-            let render_pass_begin_info = vk::RenderPassBeginInfo::builder()
-                .render_pass(self.renderpass)
-                .framebuffer(self.framebuffers[present_index as usize])
-                .render_area(self.surface_resolution.into())
-                .clear_values(&clear_values)
-                .build();
-            record_submit_commandbuffer(
-                &self.device,
-                self.draw_command_buffer,
-                self.draw_commands_reuse_fence,
-                self.present_queue,
-                &[vk::PipelineStageFlags::BOTTOM_OF_PIPE],
-                &[self.present_complete_semaphore],
-                &[self.rendering_complete_semaphore],
-                |device, draw_command_buffer| {
-                    device.cmd_begin_render_pass(
-                        draw_command_buffer,
-                        &render_pass_begin_info,
-                        vk::SubpassContents::INLINE,
-                    );
-                    device.cmd_bind_descriptor_sets(
-                        draw_command_buffer,
-                        vk::PipelineBindPoint::GRAPHICS,
-                        self.pipeline_layout,
-                        0,
-                        &self.descriptor_sets[..],
-                        &[],
-                    );
-                    device.cmd_bind_pipeline(
-                        draw_command_buffer,
-                        vk::PipelineBindPoint::GRAPHICS,
-                        self.graphics_pipelines[0],
-                    );
-                    device.cmd_set_viewport(draw_command_buffer, 0, &self.viewports);
-                    device.cmd_set_scissor(draw_command_buffer, 0, &self.scissors);
-                    device.cmd_bind_vertex_buffers(
-                        draw_command_buffer,
-                        0,
-                        &[self.vertex_input_buffer],
-                        &[0],
-                    );
-                    device.cmd_draw(draw_command_buffer, VERTICES.len() as u32, 1, 0, 1);
-                    device.cmd_end_render_pass(draw_command_buffer);
-                },
-            );
-            let present_info = vk::PresentInfoKHR {
-                wait_semaphore_count: 1,
-                p_wait_semaphores: &self.rendering_complete_semaphore,
-                swapchain_count: 1,
-                p_swapchains: &self.swapchain,
-                p_image_indices: &present_index,
-                ..Default::default()
-            };
-            self.swapchain_loader
-                .queue_present(self.present_queue, &present_info)
-                .unwrap();
-        }
+    unsafe fn render(&mut self) {
+        let (present_index, _) = self
+            .swapchain_loader
+            .acquire_next_image(
+                self.swapchain,
+                std::u64::MAX,
+                self.present_complete_semaphore,
+                vk::Fence::null(),
+            )
+            .unwrap();
+        let clear_values = [vk::ClearValue {
+            color: vk::ClearColorValue {
+                float32: [0.0, 0.0, 0.0, 0.0],
+            },
+        }];
+        let render_pass_begin_info = vk::RenderPassBeginInfo::builder()
+            .render_pass(self.renderpass)
+            .framebuffer(self.framebuffers[present_index as usize])
+            .render_area(self.surface_resolution.into())
+            .clear_values(&clear_values)
+            .build();
+        record_submit_commandbuffer(
+            &self.device,
+            self.draw_command_buffer,
+            self.draw_commands_reuse_fence,
+            self.present_queue,
+            &[vk::PipelineStageFlags::BOTTOM_OF_PIPE],
+            &[self.present_complete_semaphore],
+            &[self.rendering_complete_semaphore],
+            |device, draw_command_buffer| {
+                device.cmd_begin_render_pass(
+                    draw_command_buffer,
+                    &render_pass_begin_info,
+                    vk::SubpassContents::INLINE,
+                );
+                device.cmd_bind_descriptor_sets(
+                    draw_command_buffer,
+                    vk::PipelineBindPoint::GRAPHICS,
+                    self.pipeline_layout,
+                    0,
+                    &self.descriptor_sets[..],
+                    &[],
+                );
+                device.cmd_bind_pipeline(
+                    draw_command_buffer,
+                    vk::PipelineBindPoint::GRAPHICS,
+                    self.graphics_pipelines[0],
+                );
+                device.cmd_set_viewport(draw_command_buffer, 0, &self.viewports);
+                device.cmd_set_scissor(draw_command_buffer, 0, &self.scissors);
+                device.cmd_bind_vertex_buffers(
+                    draw_command_buffer,
+                    0,
+                    &[self.vertex_input_buffer],
+                    &[0],
+                );
+                device.cmd_draw(draw_command_buffer, VERTICES.len() as u32, 1, 0, 1);
+                device.cmd_end_render_pass(draw_command_buffer);
+            },
+        );
+        let present_info = vk::PresentInfoKHR {
+            wait_semaphore_count: 1,
+            p_wait_semaphores: &self.rendering_complete_semaphore,
+            swapchain_count: 1,
+            p_swapchains: &self.swapchain,
+            p_image_indices: &present_index,
+            ..Default::default()
+        };
+        self.swapchain_loader
+            .queue_present(self.present_queue, &present_info)
+            .unwrap();
     }
 }
@@ -1073,7 +1069,7 @@ impl Drop for VulkanWindowInner {
 }

 #[allow(clippy::too_many_arguments)]
-fn record_submit_commandbuffer<F: FnOnce(&Device, vk::CommandBuffer)>(
+unsafe fn record_submit_commandbuffer<F: FnOnce(&Device, vk::CommandBuffer)>(
     device: &Device,
     command_buffer: vk::CommandBuffer,
     command_buffer_reuse_fence: vk::Fence,
@@ -1083,47 +1079,45 @@ fn record_submit_commandbuffer<F: FnOnce(&Device, vk::CommandBuffer)>(
     signal_semaphores: &[vk::Semaphore],
     f: F,
 ) {
-    unsafe {
-        device
-            .wait_for_fences(&[command_buffer_reuse_fence], true, std::u64::MAX)
-            .expect("Wait for fence failed.");
+    device
+        .wait_for_fences(&[command_buffer_reuse_fence], true, std::u64::MAX)
+        .expect("Wait for fence failed.");

-        device
-            .reset_fences(&[command_buffer_reuse_fence])
-            .expect("Reset fences failed.");
+    device
+        .reset_fences(&[command_buffer_reuse_fence])
+        .expect("Reset fences failed.");

-        device
-            .reset_command_buffer(
-                command_buffer,
-                vk::CommandBufferResetFlags::RELEASE_RESOURCES,
-            )
-            .expect("Reset command buffer failed.");
+    device
+        .reset_command_buffer(
+            command_buffer,
+            vk::CommandBufferResetFlags::RELEASE_RESOURCES,
+        )
+        .expect("Reset command buffer failed.");

-        let command_buffer_begin_info = vk::CommandBufferBeginInfo::builder()
-            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
-            .build();
+    let command_buffer_begin_info = vk::CommandBufferBeginInfo::builder()
+        .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
+        .build();

-        device
-            .begin_command_buffer(command_buffer, &command_buffer_begin_info)
-            .expect("Begin commandbuffer");
-        f(device, command_buffer);
-        device
-            .end_command_buffer(command_buffer)
-            .expect("End commandbuffer");
+    device
+        .begin_command_buffer(command_buffer, &command_buffer_begin_info)
+        .expect("Begin commandbuffer");
+    f(device, command_buffer);
+    device
+        .end_command_buffer(command_buffer)
+        .expect("End commandbuffer");

-        let command_buffers = vec![command_buffer];
+    let command_buffers = vec![command_buffer];

-        let submit_info = vk::SubmitInfo::builder()
-            .wait_semaphores(wait_semaphores)
-            .wait_dst_stage_mask(wait_mask)
-            .command_buffers(&command_buffers)
-            .signal_semaphores(signal_semaphores)
-            .build();
+    let submit_info = vk::SubmitInfo::builder()
+        .wait_semaphores(wait_semaphores)
+        .wait_dst_stage_mask(wait_mask)
+        .command_buffers(&command_buffers)
+        .signal_semaphores(signal_semaphores)
+        .build();

-        device
-            .queue_submit(submit_queue, &[submit_info], command_buffer_reuse_fence)
-            .expect("queue submit failed.");
-    }
+    device
+        .queue_submit(submit_queue, &[submit_info], command_buffer_reuse_fence)
+        .expect("queue submit failed.");
 }

 fn find_memorytype_index(