Fix matrix when creating a scaling renderer (#143)

- This issue can be seen when the window and the pixel buffer are created with differing sizes; the image is stretched to fill the window (see the sketch after this list).
- If the application handles the resize callback to correct the image aspect ratio, resizing the window corrects the matrix immediately and adds the black border as expected. This jump is jarring when the texture-to-surface size ratio is not an integer.
- This bug also causes issues with the new `resize_buffer()` API.
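
For illustration, here is a minimal, self-contained sketch of why the old call produced no letterboxing. It assumes the scaling matrix picks the largest integer scale of the texture that fits the surface; the `scaled_quad` helper and the concrete sizes are hypothetical, and the crate's real `ScalingMatrix` internals are not shown in this diff.

```rust
// Illustrative only: the real `ScalingMatrix::new` builds a Mat4, but the
// clamped integer scale below captures the idea behind this fix.
fn scaled_quad(texture: (f32, f32), surface: (f32, f32)) -> (f32, f32) {
    let (tw, th) = texture;
    let (sw, sh) = surface;
    // Largest whole-number scale of the texture that still fits the surface,
    // never smaller than 1.
    let scale = (sw / tw).min(sh / th).floor().max(1.0);
    // Fraction of the surface covered by the scaled quad, per axis.
    ((tw * scale) / sw, (th * scale) / sh)
}

fn main() {
    // Old (buggy) behavior: the texture size was passed as the surface size,
    // so the quad always covers the whole surface and the image stretches.
    assert_eq!(scaled_quad((320.0, 240.0), (320.0, 240.0)), (1.0, 1.0));

    // Fixed behavior: with the real surface size, the quad is letterboxed.
    // A 320x240 texture in an 800x600 surface scales 2x to 640x480,
    // covering 80% of the surface on each axis.
    assert_eq!(scaled_quad((320.0, 240.0), (800.0, 600.0)), (0.8, 0.8));
}
```

Passing the texture size for both arguments, as the old code did, always yields a full-surface quad regardless of the actual window size; passing the real surface size restores the border.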
Jay Oster, 2021-03-03 10:45:14 -08:00 (committed by GitHub)
parent f2d4a4f084
commit 5dbe87d0c0
2 changed files with 4 additions and 3 deletions

@@ -309,6 +309,7 @@ pub(crate) fn create_backing_texture(
         device,
         &texture_view,
         &texture_extent,
+        surface_size,
         render_texture_format,
     );

@@ -1,3 +1,4 @@
+use crate::SurfaceSize;
 use ultraviolet::Mat4;
 use wgpu::util::DeviceExt;
@@ -17,6 +18,7 @@ impl ScalingRenderer {
         device: &wgpu::Device,
         texture_view: &wgpu::TextureView,
         texture_size: &wgpu::Extent3d,
+        surface_size: &SurfaceSize,
         render_texture_format: wgpu::TextureFormat,
     ) -> Self {
         let vs_module = device.create_shader_module(&wgpu::include_spirv!("../shaders/vert.spv"));
@@ -39,11 +41,9 @@
         });
         // Create uniform buffer
-        // TODO: This should also have the width / height of the of the window surface,
-        // so that it won't break when the window is created with a different size.
         let matrix = ScalingMatrix::new(
             (texture_size.width as f32, texture_size.height as f32),
-            (texture_size.width as f32, texture_size.height as f32),
+            (surface_size.width as f32, surface_size.height as f32),
         );
         let transform_bytes = matrix.as_bytes();
         let uniform_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {