test(wgpu): Add WGPU render test

Also rename triangle -> render
chyyran 2024-08-23 02:27:55 -04:00 committed by Ronny Chan
parent 2904a2ac10
commit 6de2de8d12
7 changed files with 327 additions and 52 deletions

Cargo.lock (generated, 15 lines changed)
View file

@@ -1471,6 +1471,18 @@ dependencies = [
"zune-jpeg",
]

[[package]]
name = "image-compare"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96cd73af13ae2e7220a1c02fe7d6bb53be50612ba7fabbb5c88e7753645f1f3c"
dependencies = [
"image",
"itertools",
"rayon",
"thiserror",
]

[[package]]
name = "image-webp"
version = "0.1.3"
@@ -1952,10 +1964,13 @@ dependencies = [
"ash",
"gfx-maths",
"image",
"image-compare",
"librashader",
"librashader-runtime",
"objc2 0.5.2",
"objc2-metal",
"parking_lot",
"pollster",
"wgpu",
"wgpu-types",
"windows 0.58.0",

View file

@@ -13,6 +13,9 @@ image = "0.25.2"
gfx-maths = "0.2.8"
ash = "0.38.0+1.3.281"
pollster = "0.3.0"
parking_lot = "0.12.3"
image-compare = "0.4.1"

[target.'cfg(windows)'.dependencies.windows]
workspace = true

View file

@@ -1,16 +1,3 @@
-mod triangle;
-
-pub fn add(left: u64, right: u64) -> u64 {
-    left + right
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn it_works() {
-        let result = add(2, 2);
-        assert_eq!(result, 4);
-    }
-}
+/// Render tests
+pub mod render;

View file

@@ -0,0 +1,66 @@
pub mod d3d11;
pub mod wgpu;
use std::path::Path;
/// Test harness to set up a device, load an image, and apply a shader
pub trait RenderTest {
/// Render onto an image buffer, applying the provided shader preset.
///
/// The test should render in linear colour space for proper comparison across
/// backends.
///
/// For testing purposes, a single image is often reused with multiple shader presets,
/// so the image that a shader will be applied to is typically part of the test
/// harness object.
fn render(
&self,
path: impl AsRef<Path>,
frame_count: usize,
) -> anyhow::Result<image::RgbaImage>;
}
#[cfg(test)]
mod test {
use crate::render::RenderTest;
use image::codecs::png::PngEncoder;
use std::fs::File;
const IMAGE_PATH: &str = "../triangle.png";
const FILTER_PATH: &str = "../test/shaders_slang/crt/crt-royale.slangp";
#[test]
pub fn test_d3d11() -> anyhow::Result<()> {
let d3d11 = super::d3d11::Direct3D11::new(IMAGE_PATH)?;
let image = d3d11.render(FILTER_PATH, 100)?;
let out = File::create("out.png")?;
image.write_with_encoder(PngEncoder::new(out))?;
Ok(())
}
#[test]
pub fn test_wgpu() -> anyhow::Result<()> {
let wgpu = super::wgpu::Wgpu::new(IMAGE_PATH)?;
let image = wgpu.render(FILTER_PATH, 100)?;
let out = File::create("out.png")?;
image.write_with_encoder(PngEncoder::new(out))?;
Ok(())
}
#[test]
pub fn compare() -> anyhow::Result<()> {
let d3d11 = super::d3d11::Direct3D11::new(IMAGE_PATH)?;
let wgpu = super::wgpu::Wgpu::new(IMAGE_PATH)?;
let wgpu_image = wgpu.render(FILTER_PATH, 100)?;
let d3d11_image = d3d11.render(FILTER_PATH, 100)?;
let similarity = image_compare::rgba_hybrid_compare(&wgpu_image, &d3d11_image)?;
assert!(similarity.score > 0.95);
Ok(())
}
}
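
A backend participates in these tests by implementing RenderTest and holding its own copy of the source image. As a minimal sketch of the contract (the NullBackend type below is hypothetical and not part of this commit), a backend that skips shading entirely and returns its image unchanged would look like:

use crate::render::RenderTest;
use std::path::Path;

/// Hypothetical no-op backend: holds the decoded source image and ignores the preset.
pub struct NullBackend {
    image: image::RgbaImage,
}

impl NullBackend {
    pub fn new(image: impl AsRef<Path>) -> anyhow::Result<Self> {
        Ok(Self {
            image: image::open(image)?.to_rgba8(),
        })
    }
}

impl RenderTest for NullBackend {
    fn render(
        &self,
        _path: impl AsRef<Path>,
        _frame_count: usize,
    ) -> anyhow::Result<image::RgbaImage> {
        // The source image lives on the harness; the preset path and frame count are unused here.
        Ok(self.image.clone())
    }
}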

View file

@@ -0,0 +1,241 @@
use crate::render::RenderTest;
use anyhow::anyhow;
use image::RgbaImage;
use librashader::runtime::wgpu::*;
use librashader::runtime::Viewport;
use librashader_runtime::image::{Image, UVDirection};
use std::io::{Cursor, Write};
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::sync::Arc;
use wgpu::{Adapter, Device, Instance, Queue, Texture};
use wgpu_types::{
BufferAddress, BufferDescriptor, BufferUsages, CommandEncoderDescriptor, ImageCopyBuffer,
ImageDataLayout, Maintain, TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
};
use parking_lot::Mutex;
pub struct Wgpu {
instance: Instance,
adapter: Adapter,
device: Arc<Device>,
queue: Arc<Queue>,
image: Image,
texture: Arc<Texture>,
}
struct BufferDimensions {
width: usize,
height: usize,
unpadded_bytes_per_row: usize,
padded_bytes_per_row: usize,
}
impl BufferDimensions {
fn new(width: usize, height: usize) -> Self {
let bytes_per_pixel = size_of::<u32>();
let unpadded_bytes_per_row = width * bytes_per_pixel;
let align = wgpu::COPY_BYTES_PER_ROW_ALIGNMENT as usize;
let padded_bytes_per_row_padding = (align - unpadded_bytes_per_row % align) % align;
let padded_bytes_per_row = unpadded_bytes_per_row + padded_bytes_per_row_padding;
Self {
width,
height,
unpadded_bytes_per_row,
padded_bytes_per_row,
}
}
}
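// Worked example (assuming the usual 256-byte wgpu COPY_BYTES_PER_ROW_ALIGNMENT):
// a 100-pixel-wide RGBA8 row needs 400 unpadded bytes, the padding is
// (256 - 400 % 256) % 256 = 112 bytes, so each padded row occupies 512 bytes in the
// readback buffer and only the first 400 bytes of each row are real pixel data.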
impl RenderTest for Wgpu {
fn render(&self, path: impl AsRef<Path>, frame_count: usize) -> anyhow::Result<RgbaImage> {
let mut chain = FilterChain::load_from_path(
path,
Arc::clone(&self.device),
Arc::clone(&self.queue),
Some(&FilterChainOptions {
force_no_mipmaps: false,
enable_cache: false,
adapter_info: None,
}),
)?;
let mut cmd = self
.device
.create_command_encoder(&CommandEncoderDescriptor { label: None });
let output_tex = self.device.create_texture(&TextureDescriptor {
label: None,
size: self.texture.size(),
mip_level_count: 1,
sample_count: 1,
dimension: TextureDimension::D2,
format: TextureFormat::Rgba8Unorm,
usage: TextureUsages::RENDER_ATTACHMENT | TextureUsages::COPY_SRC,
view_formats: &[wgpu::TextureFormat::Rgba8Unorm],
});
let buffer_dimensions =
BufferDimensions::new(output_tex.width() as usize, output_tex.height() as usize);
let output_buf = Arc::new(self.device.create_buffer(&BufferDescriptor {
label: None,
size: (buffer_dimensions.padded_bytes_per_row * buffer_dimensions.height)
as BufferAddress, // 4bpp
usage: BufferUsages::COPY_DST | BufferUsages::MAP_READ,
mapped_at_creation: false,
}));
let view = output_tex.create_view(&wgpu::TextureViewDescriptor::default());
let output = WgpuOutputView::new_from_raw(
&view,
output_tex.size().into(),
TextureFormat::Rgba8Unorm,
);
chain.frame(
Arc::clone(&self.texture),
&Viewport::new_render_target_sized_origin(output, None)?,
&mut cmd,
frame_count,
None,
)?;
cmd.copy_texture_to_buffer(
output_tex.as_image_copy(),
ImageCopyBuffer {
buffer: &output_buf,
layout: ImageDataLayout {
offset: 0,
bytes_per_row: Some(buffer_dimensions.padded_bytes_per_row as u32),
rows_per_image: None,
},
},
output_tex.size(),
);
let si = self.queue.submit([cmd.finish()]);
self.device.poll(Maintain::WaitForSubmissionIndex(si));
let capturable = Arc::clone(&output_buf);
let pixels = Arc::new(Mutex::new(Vec::new()));
let pixels_async = Arc::clone(&pixels);
output_buf
.slice(..)
.map_async(wgpu::MapMode::Read, move |r| {
if r.is_ok() {
let buffer = capturable.slice(..).get_mapped_range();
let mut pixels = pixels_async.lock();
pixels.resize(buffer.len(), 0);
let mut cursor = Cursor::new(pixels.deref_mut());
for chunk in buffer.chunks(buffer_dimensions.padded_bytes_per_row) {
cursor
.write_all(&chunk[..buffer_dimensions.unpadded_bytes_per_row])
.unwrap()
}
cursor.into_inner();
}
capturable.unmap();
});
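// Buffer-mapping callbacks in wgpu only run while the device is polled; the
// Maintain::Wait poll below blocks until outstanding work, including the
// map_async callback above, has completed, so the pixel copy is finished afterwards.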
self.device.poll(Maintain::Wait);
if pixels.lock().is_empty() {
return Err(anyhow!("failed to copy pixels from buffer"));
}
let image = RgbaImage::from_raw(
output_tex.width(),
output_tex.height(),
pixels.lock().to_vec(),
)
.ok_or(anyhow!("Unable to create image from data"))?;
Ok(image)
}
}
impl Wgpu {
pub fn new(image: impl AsRef<Path>) -> anyhow::Result<Self> {
pollster::block_on(async {
let instance = wgpu::Instance::default();
let adapter = instance
.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::default(),
compatible_surface: None,
force_fallback_adapter: false,
})
.await
.ok_or(anyhow!("Couldn't request WGPU adapter"))?;
let (device, queue) = adapter
.request_device(
&wgpu::DeviceDescriptor {
required_features: wgpu::Features::ADDRESS_MODE_CLAMP_TO_BORDER
| wgpu::Features::PIPELINE_CACHE
| wgpu::Features::TEXTURE_ADAPTER_SPECIFIC_FORMAT_FEATURES
| wgpu::Features::FLOAT32_FILTERABLE,
required_limits: wgpu::Limits::default(),
label: None,
memory_hints: Default::default(),
},
None,
)
.await?;
let (image, texture) = Self::load_image(&device, &queue, image)?;
Ok(Self {
instance,
adapter,
device: Arc::new(device),
queue: Arc::new(queue),
image,
texture: Arc::new(texture),
})
})
}
fn load_image(
device: &Device,
queue: &Queue,
path: impl AsRef<Path>,
) -> anyhow::Result<(Image, Texture)> {
let image = Image::load(path, UVDirection::TopLeft)?;
let texture = device.create_texture(&TextureDescriptor {
size: image.size.into(),
mip_level_count: 1,
sample_count: 1,
dimension: TextureDimension::D2,
usage: TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST,
format: wgpu::TextureFormat::Rgba8Unorm,
view_formats: &[wgpu::TextureFormat::Rgba8Unorm],
label: None,
});
queue.write_texture(
wgpu::ImageCopyTexture {
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&image.bytes,
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: Some(4 * image.size.width),
rows_per_image: None,
},
image.size.into(),
);
let si = queue.submit([]);
device.poll(Maintain::WaitForSubmissionIndex(si));
Ok((image, texture))
}
}
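
Outside of the built-in tests, driving the WGPU harness follows the same pattern as the test module in the render module; a minimal sketch, assuming the crate is consumed as librashader_test and using placeholder paths for the source image and .slangp preset:

use librashader_test::render::{wgpu::Wgpu, RenderTest};

fn main() -> anyhow::Result<()> {
    // Placeholder paths: point these at a real source image and shader preset.
    let harness = Wgpu::new("triangle.png")?;
    let frame = harness.render("shaders_slang/crt/crt-royale.slangp", 100)?;
    frame.save("wgpu-out.png")?;
    Ok(())
}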

View file

@@ -1,37 +0,0 @@
mod d3d11;
use std::path::Path;
/// Test harness to set up a device, render a triangle, and apply a shader
pub trait TriangleTest {
/// Render a triangle to an image buffer, applying the provided shader.
///
/// The test should render in linear colour space for proper comparison against
/// backends.
fn triangle(
&self,
image: impl AsRef<Path>,
path: impl AsRef<Path>,
frame_count: usize,
) -> anyhow::Result<image::RgbaImage>;
}
#[cfg(test)]
mod test {
use crate::triangle::TriangleTest;
use image::codecs::png::PngEncoder;
use std::fs::File;
const IMAGE_PATH: &str = "../triangle.png";
const FILTER_PATH: &str = "../test/shaders_slang/crt/crt-royale.slangp";
#[test]
pub fn test_d3d11() -> anyhow::Result<()> {
let d3d11 = super::d3d11::Direct3D11::new()?;
let image = d3d11.triangle(IMAGE_PATH, FILTER_PATH, 100)?;
let out = File::create("out.png")?;
image.write_with_encoder(PngEncoder::new(out))?;
Ok(())
}
}