move unsafe

Alex Janka 2023-10-02 18:43:49 +11:00
parent fa501247ad
commit 520d2e148b
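
In short, this commit pushes the `unsafe` boundary outward: the low-level `VulkanWindowInner` methods (`resize`, `new_frame`, `render`) and the free function `record_submit_commandbuffer` become `unsafe fn`, their interior `unsafe { ... }` blocks are removed, and the safe `WindowData` wrapper methods now take on the `unsafe { ... }` at the call site. The sketch below illustrates that pattern with simplified, hypothetical names; it is not code from this repository.

// Minimal sketch of the "move unsafe" pattern (hypothetical types).
// The inner type exposes `unsafe fn`s; the outer wrapper owns the single
// `unsafe { ... }` block where the safety contract is asserted.
struct Inner;

impl Inner {
    // Declared `unsafe fn`: the body no longer needs an interior
    // `unsafe {}` block, and callers must opt in explicitly.
    unsafe fn render(&mut self) {
        // raw Vulkan-style / FFI calls would live here
    }
}

struct Wrapper {
    inner: Inner,
}

impl Wrapper {
    // Still a safe method; the unsafety is contained at the call site.
    pub fn render(&mut self) {
        unsafe { self.inner.render() };
    }
}

fn main() {
    let mut window = Wrapper { inner: Inner };
    window.render();
}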


@@ -52,15 +52,15 @@ impl WindowData {
     }
     pub fn resize(&mut self, width: u32, height: u32, factor: u32, window: &Window) {
-        self.inner.resize(width, height, factor, window);
+        unsafe { self.inner.resize(width, height, factor, window) };
     }
     pub fn new_frame(&mut self, buffer: &[[u8; 4]]) {
-        self.inner.new_frame(buffer);
+        unsafe { self.inner.new_frame(buffer) };
     }
     pub fn render(&mut self) {
-        self.inner.render();
+        unsafe { self.inner.render() };
     }
 }
@@ -838,184 +838,180 @@ impl VulkanWindowInner {
         }
     }
-    fn resize(&mut self, _width: u32, _height: u32, factor: u32, window: &Window) {
-        *self = unsafe { Self::new(factor, window) };
+    unsafe fn resize(&mut self, _width: u32, _height: u32, factor: u32, window: &Window) {
+        *self = Self::new(factor, window);
     }
-    fn new_frame(&mut self, buffer: &[[u8; 4]]) {
-        unsafe {
-            let image_ptr = self
-                .device
-                .map_memory(
-                    self.image_buffer_memory,
-                    0,
-                    self.image_buffer_memory_req.size,
-                    vk::MemoryMapFlags::empty(),
-                )
-                .unwrap();
-            let mut image_slice: Align<u8> = Align::new(
-                image_ptr,
-                std::mem::align_of::<u8>() as u64,
-                self.image_buffer_memory_req.size,
-            );
-            image_slice.copy_from_slice(bytemuck::cast_slice(buffer));
-            self.device.unmap_memory(self.image_buffer_memory);
-            self.device
-                .bind_buffer_memory(self.image_buffer, self.image_buffer_memory, 0)
-                .unwrap();
-            record_submit_commandbuffer(
-                &self.device,
-                self.setup_command_buffer,
-                self.setup_commands_reuse_fence,
-                self.present_queue,
-                &[],
-                &[],
-                &[],
-                |device, texture_command_buffer| {
-                    let texture_barrier = vk::ImageMemoryBarrier {
-                        dst_access_mask: vk::AccessFlags::TRANSFER_WRITE,
-                        new_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
-                        image: self.texture_image,
-                        subresource_range: vk::ImageSubresourceRange {
-                            aspect_mask: vk::ImageAspectFlags::COLOR,
-                            level_count: 1,
-                            layer_count: 1,
-                            ..Default::default()
-                        },
-                        ..Default::default()
-                    };
-                    device.cmd_pipeline_barrier(
-                        texture_command_buffer,
-                        vk::PipelineStageFlags::BOTTOM_OF_PIPE,
-                        vk::PipelineStageFlags::TRANSFER,
-                        vk::DependencyFlags::empty(),
-                        &[],
-                        &[],
-                        &[texture_barrier],
-                    );
-                    let buffer_copy_regions = vk::BufferImageCopy::builder()
-                        .image_subresource(
-                            vk::ImageSubresourceLayers::builder()
-                                .aspect_mask(vk::ImageAspectFlags::COLOR)
-                                .layer_count(1)
-                                .build(),
-                        )
-                        .image_extent(self.image_extent.into())
-                        .build();
-                    device.cmd_copy_buffer_to_image(
-                        texture_command_buffer,
-                        self.image_buffer,
-                        self.texture_image,
-                        vk::ImageLayout::TRANSFER_DST_OPTIMAL,
-                        &[buffer_copy_regions],
-                    );
-                    let texture_barrier_end = vk::ImageMemoryBarrier {
-                        src_access_mask: vk::AccessFlags::TRANSFER_WRITE,
-                        dst_access_mask: vk::AccessFlags::SHADER_READ,
-                        old_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
-                        new_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
-                        image: self.texture_image,
-                        subresource_range: vk::ImageSubresourceRange {
-                            aspect_mask: vk::ImageAspectFlags::COLOR,
-                            level_count: 1,
-                            layer_count: 1,
-                            ..Default::default()
-                        },
-                        ..Default::default()
-                    };
-                    device.cmd_pipeline_barrier(
-                        texture_command_buffer,
-                        vk::PipelineStageFlags::TRANSFER,
-                        vk::PipelineStageFlags::FRAGMENT_SHADER,
-                        vk::DependencyFlags::empty(),
-                        &[],
-                        &[],
-                        &[texture_barrier_end],
-                    );
-                },
-            );
-        }
-    }
+    unsafe fn new_frame(&mut self, buffer: &[[u8; 4]]) {
+        let image_ptr = self
+            .device
+            .map_memory(
+                self.image_buffer_memory,
+                0,
+                self.image_buffer_memory_req.size,
+                vk::MemoryMapFlags::empty(),
+            )
+            .unwrap();
+        let mut image_slice: Align<u8> = Align::new(
+            image_ptr,
+            std::mem::align_of::<u8>() as u64,
+            self.image_buffer_memory_req.size,
+        );
+        image_slice.copy_from_slice(bytemuck::cast_slice(buffer));
+        self.device.unmap_memory(self.image_buffer_memory);
+        self.device
+            .bind_buffer_memory(self.image_buffer, self.image_buffer_memory, 0)
+            .unwrap();
+        record_submit_commandbuffer(
+            &self.device,
+            self.setup_command_buffer,
+            self.setup_commands_reuse_fence,
+            self.present_queue,
+            &[],
+            &[],
+            &[],
+            |device, texture_command_buffer| {
+                let texture_barrier = vk::ImageMemoryBarrier {
+                    dst_access_mask: vk::AccessFlags::TRANSFER_WRITE,
+                    new_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
+                    image: self.texture_image,
+                    subresource_range: vk::ImageSubresourceRange {
+                        aspect_mask: vk::ImageAspectFlags::COLOR,
+                        level_count: 1,
+                        layer_count: 1,
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                };
+                device.cmd_pipeline_barrier(
+                    texture_command_buffer,
+                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
+                    vk::PipelineStageFlags::TRANSFER,
+                    vk::DependencyFlags::empty(),
+                    &[],
+                    &[],
+                    &[texture_barrier],
+                );
+                let buffer_copy_regions = vk::BufferImageCopy::builder()
+                    .image_subresource(
+                        vk::ImageSubresourceLayers::builder()
+                            .aspect_mask(vk::ImageAspectFlags::COLOR)
+                            .layer_count(1)
+                            .build(),
+                    )
+                    .image_extent(self.image_extent.into())
+                    .build();
+                device.cmd_copy_buffer_to_image(
+                    texture_command_buffer,
+                    self.image_buffer,
+                    self.texture_image,
+                    vk::ImageLayout::TRANSFER_DST_OPTIMAL,
+                    &[buffer_copy_regions],
+                );
+                let texture_barrier_end = vk::ImageMemoryBarrier {
+                    src_access_mask: vk::AccessFlags::TRANSFER_WRITE,
+                    dst_access_mask: vk::AccessFlags::SHADER_READ,
+                    old_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
+                    new_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
+                    image: self.texture_image,
+                    subresource_range: vk::ImageSubresourceRange {
+                        aspect_mask: vk::ImageAspectFlags::COLOR,
+                        level_count: 1,
+                        layer_count: 1,
+                        ..Default::default()
+                    },
+                    ..Default::default()
+                };
+                device.cmd_pipeline_barrier(
+                    texture_command_buffer,
+                    vk::PipelineStageFlags::TRANSFER,
+                    vk::PipelineStageFlags::FRAGMENT_SHADER,
+                    vk::DependencyFlags::empty(),
+                    &[],
+                    &[],
+                    &[texture_barrier_end],
+                );
+            },
+        );
+    }
-    fn render(&mut self) {
-        unsafe {
-            let (present_index, _) = self
-                .swapchain_loader
-                .acquire_next_image(
-                    self.swapchain,
-                    std::u64::MAX,
-                    self.present_complete_semaphore,
-                    vk::Fence::null(),
-                )
-                .unwrap();
-            let clear_values = [vk::ClearValue {
-                color: vk::ClearColorValue {
-                    float32: [0.0, 0.0, 0.0, 0.0],
-                },
-            }];
-            let render_pass_begin_info = vk::RenderPassBeginInfo::builder()
-                .render_pass(self.renderpass)
-                .framebuffer(self.framebuffers[present_index as usize])
-                .render_area(self.surface_resolution.into())
-                .clear_values(&clear_values)
-                .build();
-            record_submit_commandbuffer(
-                &self.device,
-                self.draw_command_buffer,
-                self.draw_commands_reuse_fence,
-                self.present_queue,
-                &[vk::PipelineStageFlags::BOTTOM_OF_PIPE],
-                &[self.present_complete_semaphore],
-                &[self.rendering_complete_semaphore],
-                |device, draw_command_buffer| {
-                    device.cmd_begin_render_pass(
-                        draw_command_buffer,
-                        &render_pass_begin_info,
-                        vk::SubpassContents::INLINE,
-                    );
-                    device.cmd_bind_descriptor_sets(
-                        draw_command_buffer,
-                        vk::PipelineBindPoint::GRAPHICS,
-                        self.pipeline_layout,
-                        0,
-                        &self.descriptor_sets[..],
-                        &[],
-                    );
-                    device.cmd_bind_pipeline(
-                        draw_command_buffer,
-                        vk::PipelineBindPoint::GRAPHICS,
-                        self.graphics_pipelines[0],
-                    );
-                    device.cmd_set_viewport(draw_command_buffer, 0, &self.viewports);
-                    device.cmd_set_scissor(draw_command_buffer, 0, &self.scissors);
-                    device.cmd_bind_vertex_buffers(
-                        draw_command_buffer,
-                        0,
-                        &[self.vertex_input_buffer],
-                        &[0],
-                    );
-                    device.cmd_draw(draw_command_buffer, VERTICES.len() as u32, 1, 0, 1);
-                    device.cmd_end_render_pass(draw_command_buffer);
-                },
-            );
-            let present_info = vk::PresentInfoKHR {
-                wait_semaphore_count: 1,
-                p_wait_semaphores: &self.rendering_complete_semaphore,
-                swapchain_count: 1,
-                p_swapchains: &self.swapchain,
-                p_image_indices: &present_index,
-                ..Default::default()
-            };
-            self.swapchain_loader
-                .queue_present(self.present_queue, &present_info)
-                .unwrap();
-        }
-    }
+    unsafe fn render(&mut self) {
+        let (present_index, _) = self
+            .swapchain_loader
+            .acquire_next_image(
+                self.swapchain,
+                std::u64::MAX,
+                self.present_complete_semaphore,
+                vk::Fence::null(),
+            )
+            .unwrap();
+        let clear_values = [vk::ClearValue {
+            color: vk::ClearColorValue {
+                float32: [0.0, 0.0, 0.0, 0.0],
+            },
+        }];
+        let render_pass_begin_info = vk::RenderPassBeginInfo::builder()
+            .render_pass(self.renderpass)
+            .framebuffer(self.framebuffers[present_index as usize])
+            .render_area(self.surface_resolution.into())
+            .clear_values(&clear_values)
+            .build();
+        record_submit_commandbuffer(
+            &self.device,
+            self.draw_command_buffer,
+            self.draw_commands_reuse_fence,
+            self.present_queue,
+            &[vk::PipelineStageFlags::BOTTOM_OF_PIPE],
+            &[self.present_complete_semaphore],
+            &[self.rendering_complete_semaphore],
+            |device, draw_command_buffer| {
+                device.cmd_begin_render_pass(
+                    draw_command_buffer,
+                    &render_pass_begin_info,
+                    vk::SubpassContents::INLINE,
+                );
+                device.cmd_bind_descriptor_sets(
+                    draw_command_buffer,
+                    vk::PipelineBindPoint::GRAPHICS,
+                    self.pipeline_layout,
+                    0,
+                    &self.descriptor_sets[..],
+                    &[],
+                );
+                device.cmd_bind_pipeline(
+                    draw_command_buffer,
+                    vk::PipelineBindPoint::GRAPHICS,
+                    self.graphics_pipelines[0],
+                );
+                device.cmd_set_viewport(draw_command_buffer, 0, &self.viewports);
+                device.cmd_set_scissor(draw_command_buffer, 0, &self.scissors);
+                device.cmd_bind_vertex_buffers(
+                    draw_command_buffer,
+                    0,
+                    &[self.vertex_input_buffer],
+                    &[0],
+                );
+                device.cmd_draw(draw_command_buffer, VERTICES.len() as u32, 1, 0, 1);
+                device.cmd_end_render_pass(draw_command_buffer);
+            },
+        );
+        let present_info = vk::PresentInfoKHR {
+            wait_semaphore_count: 1,
+            p_wait_semaphores: &self.rendering_complete_semaphore,
+            swapchain_count: 1,
+            p_swapchains: &self.swapchain,
+            p_image_indices: &present_index,
+            ..Default::default()
+        };
+        self.swapchain_loader
+            .queue_present(self.present_queue, &present_info)
+            .unwrap();
+    }
 }
@@ -1073,7 +1069,7 @@ impl Drop for VulkanWindowInner {
 }
 #[allow(clippy::too_many_arguments)]
-fn record_submit_commandbuffer<F: FnOnce(&Device, vk::CommandBuffer)>(
+unsafe fn record_submit_commandbuffer<F: FnOnce(&Device, vk::CommandBuffer)>(
     device: &Device,
     command_buffer: vk::CommandBuffer,
     command_buffer_reuse_fence: vk::Fence,
@@ -1083,47 +1079,45 @@ fn record_submit_commandbuffer<F: FnOnce(&Device, vk::CommandBuffer)>(
     signal_semaphores: &[vk::Semaphore],
     f: F,
 ) {
-    unsafe {
-        device
-            .wait_for_fences(&[command_buffer_reuse_fence], true, std::u64::MAX)
-            .expect("Wait for fence failed.");
-        device
-            .reset_fences(&[command_buffer_reuse_fence])
-            .expect("Reset fences failed.");
-        device
-            .reset_command_buffer(
-                command_buffer,
-                vk::CommandBufferResetFlags::RELEASE_RESOURCES,
-            )
-            .expect("Reset command buffer failed.");
-        let command_buffer_begin_info = vk::CommandBufferBeginInfo::builder()
-            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
-            .build();
-        device
-            .begin_command_buffer(command_buffer, &command_buffer_begin_info)
-            .expect("Begin commandbuffer");
-        f(device, command_buffer);
-        device
-            .end_command_buffer(command_buffer)
-            .expect("End commandbuffer");
-        let command_buffers = vec![command_buffer];
-        let submit_info = vk::SubmitInfo::builder()
-            .wait_semaphores(wait_semaphores)
-            .wait_dst_stage_mask(wait_mask)
-            .command_buffers(&command_buffers)
-            .signal_semaphores(signal_semaphores)
-            .build();
-        device
-            .queue_submit(submit_queue, &[submit_info], command_buffer_reuse_fence)
-            .expect("queue submit failed.");
-    }
+    device
+        .wait_for_fences(&[command_buffer_reuse_fence], true, std::u64::MAX)
+        .expect("Wait for fence failed.");
+    device
+        .reset_fences(&[command_buffer_reuse_fence])
+        .expect("Reset fences failed.");
+    device
+        .reset_command_buffer(
+            command_buffer,
+            vk::CommandBufferResetFlags::RELEASE_RESOURCES,
+        )
+        .expect("Reset command buffer failed.");
+    let command_buffer_begin_info = vk::CommandBufferBeginInfo::builder()
+        .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
+        .build();
+    device
+        .begin_command_buffer(command_buffer, &command_buffer_begin_info)
+        .expect("Begin commandbuffer");
+    f(device, command_buffer);
+    device
+        .end_command_buffer(command_buffer)
+        .expect("End commandbuffer");
+    let command_buffers = vec![command_buffer];
+    let submit_info = vk::SubmitInfo::builder()
+        .wait_semaphores(wait_semaphores)
+        .wait_dst_stage_mask(wait_mask)
+        .command_buffers(&command_buffers)
+        .signal_semaphores(signal_semaphores)
+        .build();
+    device
+        .queue_submit(submit_queue, &[submit_info], command_buffer_reuse_fence)
+        .expect("queue submit failed.");
 }
 fn find_memorytype_index(