presets: greatly improve path resolution logic and parsing of invalid constructs

chyyran 2023-02-08 03:05:55 -05:00
parent 4947eff4d1
commit 48a1b28665
9 changed files with 254 additions and 61 deletions

BROKEN_SHADERS.md (new file)

@@ -0,0 +1,42 @@
# Broken Shader Presets
The following shaders are known to be broken due to various issues.
This list is current as of [slang-shaders@`356678e`](https://github.com/libretro/slang-shaders/commit/356678ec53ca940a53fa509eff0b65bb63a403bb).
## Parsing errors
librashader's preset parser is somewhat stricter than RetroArch about what it accepts. Every shader and texture path in a preset must
resolve to a fully canonical, existing path for the preset to parse. The following presets reference paths that do not resolve (see the sketch after this list).
* `bezel/Mega_Bezel/shaders/hyllian/crt-super-xbr/crt-super-xbr.slangp`: Missing `bezel/Mega_Bezel/shaders/hyllian/crt-super-xbr/shaders/linearize.slang`
* `crt/crt-maximus-royale-fast-mode.slangp`: Missing `crt/shaders/crt-maximus-royale/FrameTextures/16_9/TV_decor_1.png`
* `crt/crt-maximus-royale-half-res-mode.slangp`: Missing `crt/shaders/crt-maximus-royale/FrameTextures/16_9/TV_decor_1.png`
* `crt/crt-maximus-royale.slangp`: Missing `crt/shaders/crt-maximus-royale/FrameTextures/16_9/TV_decor_1.png`
* `crt/mame_hlsl.slangp`: Missing `crt/shaders/mame_hlsl/shaders/lut.slang`
* `denoisers/fast-bilateral-super-2xbr-3d-3p.slangp`: Missing `xbr/shaders/super-xbr/super-2xbr-3d-pass0.slang`
* `presets/tvout/tvout+ntsc-256px-composite.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-3phase.slang`
* `presets/tvout/tvout+ntsc-256px-svideo.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-3phase.slang`
* `presets/tvout/tvout+ntsc-2phase-composite.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-2phase.slang`
* `presets/tvout/tvout+ntsc-2phase-svideo.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-2phase.slang`
* `presets/tvout/tvout+ntsc-320px-composite.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-2phase.slang`
* `presets/tvout/tvout+ntsc-320px-svideo.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-2phase.slang`
* `presets/tvout/tvout+ntsc-3phase-composite.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-3phase.slang`
* `presets/tvout/tvout+ntsc-3phase-svideo.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-3phase.slang`
* `presets/tvout/tvout+ntsc-nes.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-3phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-256px-composite+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-3phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-256px-svideo+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-3phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-2phase-composite+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-2phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-2phase-svideo+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-2phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-320px-composite+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-2phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-320px-svideo+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-2phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-3phase-composite+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-3phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-3phase-svideo+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-svideo-3phase.slang`
* `presets/tvout+interlacing/tvout+ntsc-nes+interlacing.slangp`: Missing `ntsc/shaders/ntsc-pass1-composite-3phase.slang`
* `scalefx/shaders/old/scalefx-9x.slangp`: Missing `../stock.slang`
* `scalefx/shaders/old/scalefx.slangp`: Missing `../stock.slang`
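
Concretely, every relative path a preset references is resolved against the directory of the preset (or of the referencing preset) and must canonicalize to a file that exists. A minimal sketch of that rule, assuming a hypothetical `resolve_referenced_path` helper rather than librashader's actual API:

```rust
use std::io;
use std::path::{Path, PathBuf};

// Sketch only: the helper name is illustrative, not part of librashader.
// A path referenced by a preset is joined onto the preset's parent directory
// and must canonicalize, i.e. every component must exist on disk.
fn resolve_referenced_path(preset: &Path, referenced: &str) -> io::Result<PathBuf> {
    let root = preset.parent().unwrap_or_else(|| Path::new("."));
    root.join(referenced).canonicalize()
}

fn main() {
    // `scalefx/shaders/old/scalefx.slangp` references `../stock.slang`, which does
    // not exist at `scalefx/shaders/stock.slang`, so canonicalization fails and
    // the preset is rejected.
    let preset = Path::new("scalefx/shaders/old/scalefx.slangp");
    match resolve_referenced_path(preset, "../stock.slang") {
        Ok(p) => println!("resolved to {}", p.display()),
        Err(e) => println!("rejected: {e}"),
    }
}
```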
librashader's parser is tested against every preset in slang-shaders and accepts invalid keys such as `mipmap1` or `filter_texture = linear`
to account for presets that use these invalid constructs (see the sketch below). No known presets fail to parse due to syntax errors
that have not already been accounted for.
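
A rough illustration of that leniency, not librashader's actual implementation (the real parser handles these cases directly while interpreting tokens, in the `parse_values` hunks further down): `mipmapN` is read as if it were `mipmap_inputN`, and `filter_NAME = linear` is read as a linear-filter hint for the texture `NAME`.

```rust
// Illustration only: a lenient key normalizer in the spirit of the behavior
// described above.
fn normalize_key(key: &str) -> String {
    // `mipmap1 = true` is read as if it were `mipmap_input1 = true`.
    if let Some(idx) = key.strip_prefix("mipmap") {
        if !idx.is_empty() && idx.chars().all(|c| c.is_ascii_digit()) {
            return format!("mipmap_input{idx}");
        }
    }
    // `filter_SomeTexture = linear` is read as a filter hint for `SomeTexture`.
    if let Some(name) = key.strip_prefix("filter_") {
        return format!("{name}_linear");
    }
    key.to_string()
}

fn main() {
    assert_eq!(normalize_key("mipmap1"), "mipmap_input1");
    assert_eq!(normalize_key("filter_BloomTex"), "BloomTex_linear");
    assert_eq!(normalize_key("mipmap_input2"), "mipmap_input2");
}
```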

Cargo.lock (generated)

@@ -619,6 +619,12 @@ dependencies = [
  "cmake",
 ]
 
+[[package]]
+name = "glob"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+
 [[package]]
 name = "hashbrown"
 version = "0.12.3"

@@ -793,6 +799,7 @@ dependencies = [
 name = "librashader-presets"
 version = "0.1.0-beta.15"
 dependencies = [
+ "glob",
  "librashader-common",
  "nom",
  "nom_locate",


@@ -99,6 +99,18 @@ impl FromStr for WrapMode {
     }
 }
 
+impl FromStr for FilterMode {
+    type Err = Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(match s {
+            "linear" => FilterMode::Linear,
+            "nearest" => FilterMode::Nearest,
+            _ => FilterMode::Nearest,
+        })
+    }
+}
+
 #[repr(i32)]
 #[derive(Copy, Clone, Default, Debug, Eq, PartialEq, Hash)]
 /// The wrapping (address) mode for a texture sampler.
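
For reference, a short usage sketch of the new `FromStr` impl, assuming `FilterMode` is exported from `librashader_common` and derives `Debug`/`PartialEq` like `WrapMode` above. Unknown strings intentionally fall back to `FilterMode::Nearest`:

```rust
use std::str::FromStr;
use librashader_common::FilterMode; // assumed export path

fn main() {
    // The error type is Infallible, so unwrap never panics.
    assert_eq!(FilterMode::from_str("linear").unwrap(), FilterMode::Linear);
    assert_eq!(FilterMode::from_str("nearest").unwrap(), FilterMode::Nearest);
    // Unknown values fall back to Nearest rather than erroring, matching the
    // lenient treatment of preset keys elsewhere in this commit.
    assert_eq!(FilterMode::from_str("garbage").unwrap(), FilterMode::Nearest);
}
```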


@@ -20,3 +20,6 @@ num-traits = "0.2"
 
 [features]
 parse_legacy_glsl = []
+
+[dev-dependencies]
+glob = "0.3.1"


@@ -60,7 +60,7 @@ fn extract_from_quotes(input: Span) -> IResult<Span, Span> {
 }
 
 fn multiline_comment(i: Span) -> IResult<Span, Span> {
-    delimited(tag("/*"), is_not("*/"), tag("*/"))(i)
+    delimited(tag("/*"), take_until("*/"), tag("*/"))(i)
 }
 
 fn single_comment(i: Span) -> IResult<Span, Span> {

@@ -97,7 +97,6 @@ fn parse_key_value(input: Span) -> IResult<Span, Token> {
     let (_, value) =
         take_until::<_, _, nom::error::Error<Span>>("#")(value).unwrap_or((input, value));
     let (_, (_, value)) = map_res(not_line_ending, optional_quotes)(value)?;
     Ok((input, Token { key, value }))
 }
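
The `multiline_comment` change matters because `is_not("*/")` stops at the first `*` or `/` character rather than at the literal `*/` terminator, so any comment body containing a slash or asterisk failed to lex. A standalone sketch of the difference, using `&str` instead of the crate's `Span` type:

```rust
use nom::bytes::complete::{is_not, tag, take_until};
use nom::sequence::delimited;
use nom::IResult;

// Old behaviour: is_not("*/") consumes characters that are neither '*' nor '/',
// so a '/' inside the comment body ends the match early.
fn multiline_comment_old(i: &str) -> IResult<&str, &str> {
    delimited(tag("/*"), is_not("*/"), tag("*/"))(i)
}

// New behaviour: take_until("*/") consumes everything up to the literal "*/".
fn multiline_comment_new(i: &str) -> IResult<&str, &str> {
    delimited(tag("/*"), take_until("*/"), tag("*/"))(i)
}

fn main() {
    let input = "/* path: shaders/foo.slang */shader0 = a.slang";
    assert!(multiline_comment_old(input).is_err()); // '/' inside the body breaks it
    assert!(multiline_comment_new(input).is_ok());
}
```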


@@ -66,7 +66,11 @@ impl Value {
 }
 
 fn from_int(input: Span) -> Result<i32, ParsePresetError> {
-    i32::from_str(input.trim())
+    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
+    // It's too hard to put this in the lexer because we can't tell between
+    // semicolon crimes or a valid param/texture name listing.
+    let to_parse = input.trim().trim_end_matches(";");
+    i32::from_str(to_parse)
         .map_err(|_| ParsePresetError::ParserError {
             offset: input.location_offset(),
             row: input.location_line(),

@@ -74,7 +78,8 @@ fn from_int(input: Span) -> Result<i32, ParsePresetError> {
             kind: ParseErrorKind::Int,
         })
         .or_else(|e| {
-            let result = f32::from_str(input.trim()).map_err(|_| e)?;
+            // An even more egregious ✨CRIME✨ is using a float as a shader index.
+            let result = f32::from_str(to_parse).map_err(|_| e)?;
             let result = result
                 .trunc()
                 .to_i32()

@@ -89,7 +94,10 @@ fn from_int(input: Span) -> Result<i32, ParsePresetError> {
 }
 
 fn from_ul(input: Span) -> Result<u32, ParsePresetError> {
-    u32::from_str(input.trim()).map_err(|_| ParsePresetError::ParserError {
+    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
+    // It's too hard to put this in the lexer because we can't tell between
+    // semicolon crimes or a valid param/texture name listing.
+    u32::from_str(input.trim().trim_end_matches(";")).map_err(|_| ParsePresetError::ParserError {
         offset: input.location_offset(),
         row: input.location_line(),
         col: input.get_column(),

@@ -98,14 +106,14 @@ fn from_ul(input: Span) -> Result<u32, ParsePresetError> {
 }
 
 fn from_float(input: Span) -> Result<f32, ParsePresetError> {
-    f32::from_str(input.trim()).map_err(|_| {
-        eprintln!("{input:?}");
-        ParsePresetError::ParserError {
+    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
+    // It's too hard to put this in the lexer because we can't tell between
+    // semicolon crimes or a valid param/texture name listing.
+    f32::from_str(input.trim().trim_end_matches(";")).map_err(|_| ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Float,
-        }
    })
 }
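
A condensed, standalone sketch of the rule these three hunks implement, simplified to `Option` instead of `ParsePresetError`: trim whitespace and a trailing `;`, and accept a float where an integer index is expected by truncating it.

```rust
// Sketch of the lenient numeric parsing above, simplified to Option.
fn lenient_int(raw: &str) -> Option<i32> {
    let cleaned = raw.trim().trim_end_matches(';');
    cleaned
        .parse::<i32>()
        .ok()
        // Fall back to parsing a float and truncating, e.g. "3.0" as a shader index.
        .or_else(|| cleaned.parse::<f32>().ok().map(|f| f.trunc() as i32))
}

fn main() {
    assert_eq!(lenient_int("2"), Some(2));
    assert_eq!(lenient_int("2;"), Some(2)); // trailing-semicolon "crime"
    assert_eq!(lenient_int("3.0"), Some(3)); // float used where an int is expected
    assert_eq!(lenient_int("abc"), None);
}
```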
@@ -140,44 +148,52 @@ fn parse_indexed_key<'a>(key: &'static str, input: Span<'a>) -> IResult<Span<'a>
 pub const SHADER_MAX_REFERENCE_DEPTH: usize = 16;
 
 fn load_child_reference_strings(
-    mut root_references: Vec<PathBuf>,
+    root_references: Vec<PathBuf>,
     root_path: impl AsRef<Path>,
 ) -> Result<Vec<(PathBuf, String)>, ParsePresetError> {
     let root_path = root_path.as_ref();
 
     let mut reference_depth = 0;
     let mut reference_strings: Vec<(PathBuf, String)> = Vec::new();
-    while let Some(reference_path) = root_references.pop() {
+    let mut root_references = vec![(root_path.to_path_buf(), root_references)];
+    while let Some((reference_root, referenced_paths)) = root_references.pop() {
         if reference_depth > SHADER_MAX_REFERENCE_DEPTH {
             return Err(ParsePresetError::ExceededReferenceDepth);
         }
+        // enter the current root
         reference_depth += 1;
-        let mut root_path = root_path.to_path_buf();
-        root_path.push(reference_path);
-        let mut reference_root = root_path
-            .canonicalize()
-            .map_err(|e| ParsePresetError::IOError(root_path, e))?;
+        // canonicalize current root
+        let reference_root = reference_root
+            .canonicalize()
+            .map_err(|e| ParsePresetError::IOError(reference_root.to_path_buf(), e))?;
 
-        let mut reference_contents = String::new();
-        File::open(&reference_root)
-            .map_err(|e| ParsePresetError::IOError(reference_root.clone(), e))?
-            .read_to_string(&mut reference_contents)
-            .map_err(|e| ParsePresetError::IOError(reference_root.clone(), e))?;
+        // resolve all referenced paths against root
+        // println!("Resolving {referenced_paths:?} against {reference_root:?}.");
+        for path in referenced_paths {
+            let mut path = reference_root
+                .join(path.clone())
+                .canonicalize()
+                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
+            // println!("Opening {:?}", path);
 
-        let mut new_tokens = do_lex(&reference_contents)?;
-        let mut new_references: Vec<PathBuf> = new_tokens
-            .drain_filter(|token| *token.key.fragment() == "#reference")
-            .map(|value| PathBuf::from(*value.value.fragment()))
-            .collect();
-        root_references.append(&mut new_references);
+            let mut reference_contents = String::new();
+            File::open(&path)
+                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?
+                .read_to_string(&mut reference_contents)
+                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
 
-        // return the relative root that shader and texture paths are to be resolved against.
-        if !reference_root.is_dir() {
-            reference_root.pop();
-        }
+            let mut new_tokens = do_lex(&reference_contents)?;
+            let new_references: Vec<PathBuf> = new_tokens
+                .drain_filter(|token| *token.key.fragment() == "#reference")
+                .map(|value| PathBuf::from(*value.value.fragment()))
+                .collect();
 
-        // trim end space
-        reference_strings.push((reference_root, reference_contents));
+            path.pop();
+            reference_strings.push((path.clone(), reference_contents));
+            if !new_references.is_empty() {
+                root_references.push((path, new_references));
+            }
+        }
     }
 
     Ok(reference_strings)
@@ -294,41 +310,54 @@ pub fn parse_values(
         }
     }
 
-    let mut tokens: Vec<Token> = all_tokens
+    let mut tokens: Vec<(&Path, Token)> = all_tokens
         .into_iter()
-        .flat_map(|(_, token)| token)
+        .flat_map(|(p, token)| token.into_iter().map(move |t| (p, t)))
         .collect();
 
     for (texture, path) in textures {
-        let mipmap = remove_if(&mut tokens, |t| {
+        let mipmap = remove_if(&mut tokens, |(_, t)| {
             t.key.starts_with(*texture)
                 && t.key.ends_with("_mipmap")
                 && t.key.len() == texture.len() + "_mipmap".len()
         })
-        .map_or_else(|| Ok(false), |v| from_bool(v.value))?;
+        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
 
-        let linear = remove_if(&mut tokens, |t| {
+        let linear = remove_if(&mut tokens, |(_, t)| {
             t.key.starts_with(*texture)
                 && t.key.ends_with("_linear")
                 && t.key.len() == texture.len() + "_linear".len()
         })
-        .map_or_else(|| Ok(false), |v| from_bool(v.value))?;
+        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
 
-        let wrap_mode = remove_if(&mut tokens, |t| {
+        let wrap_mode = remove_if(&mut tokens, |(_, t)| {
             t.key.starts_with(*texture)
-                && t.key.ends_with("_wrap_mode")
-                && t.key.len() == texture.len() + "_wrap_mode".len()
+                && (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
+                && (t.key.len() == texture.len() + "_wrap_mode".len()
+                    || t.key.len() == texture.len() + "_repeat_mode".len())
         })
         // NOPANIC: infallible
-        .map_or_else(WrapMode::default, |v| WrapMode::from_str(&v.value).unwrap());
+        .map_or_else(WrapMode::default, |(_, v)| {
+            WrapMode::from_str(&v.value).unwrap()
+        });
+
+        // This really isn't supported but crt-torridgristle uses this syntax.
+        // Again, don't know how this works in RA but RA's parser isn't as strict as ours.
+        let filter = remove_if(&mut tokens, |(_, t)| {
+            t.key.starts_with("filter_")
+                && t.key.ends_with(*texture)
+                && t.key.len() == "filter_".len() + texture.len()
+        })
+        // NOPANIC: infallible
+        .map_or(None, |(_, v)| Some(FilterMode::from_str(&v.value).unwrap()));
 
         values.push(Value::Texture {
             name: texture.to_string(),
-            filter_mode: if linear {
+            filter_mode: filter.unwrap_or(if linear {
                 FilterMode::Linear
             } else {
                 FilterMode::Nearest
-            },
+            }),
             wrap_mode,
             mipmap,
             path,
@@ -336,10 +365,15 @@ pub fn parse_values(
     }
 
     let mut rest_tokens = Vec::new();
-    // no more textures left in the token tree
-    for token in tokens {
+    // hopefully no more textures left in the token tree
+    for (p, token) in tokens {
         if parameter_names.contains(token.key.fragment()) {
-            let param_val = from_float(token.value)?;
+            let param_val = from_float(token.value)
+                // This is literally just to work around BEAM_PROFILE in crt-hyllian-sinc-glow.slangp
+                // which has ""0'.000000". This somehow works in RA because it defaults to 0, probably.
+                // This hack is only used for **known** parameter names. If we tried this for undeclared
+                // params (god help me), it would be pretty bad because we lose texture path fallback.
+                .unwrap_or(0.0);
             values.push(Value::Parameter(
                 token.key.fragment().to_string(),
                 param_val,
@@ -375,6 +409,13 @@ pub fn parse_values(
             continue;
         }
 
+        // crt-geom uses repeat_mode...
+        if let Ok((_, idx)) = parse_indexed_key("repeat_mode", token.key) {
+            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
+            values.push(Value::WrapMode(idx, wrap_mode));
+            continue;
+        }
+
         // crt-royale uses 'texture_wrap_mode' instead of 'wrap_mode', I have no idea
         // how this possibly could work in RA, but here it is..
         if let Ok((_, idx)) = parse_indexed_key("texture_wrap_mode", token.key) {
@@ -407,6 +448,13 @@ pub fn parse_values(
             continue;
         }
 
+        // vector-glow-alt-render.slangp uses "mipmap" for pass 1, but "mipmap_input" for everything else.
+        if let Ok((_, idx)) = parse_indexed_key("mipmap", token.key) {
+            let enabled = from_bool(token.value)?;
+            values.push(Value::MipmapInput(idx, enabled));
+            continue;
+        }
+
         if let Ok((_, idx)) = parse_indexed_key("alias", token.key) {
             values.push(Value::Alias(idx, token.value.to_string()));
             continue;
@@ -426,12 +474,11 @@ pub fn parse_values(
             values.push(Value::ScaleTypeY(idx, scale_type));
             continue;
         }
 
-        rest_tokens.push(token)
+        rest_tokens.push((p, token))
     }
 
-    // todo: handle rest_tokens (scale needs to know abs or float),
-    for token in rest_tokens {
+    let mut undeclared_textures = Vec::new();
+    for (path, token) in &rest_tokens {
         if let Ok((_, idx)) = parse_indexed_key("scale", token.key) {
             let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                 let scale = from_int(token.value)?;
@@ -470,12 +517,68 @@ pub fn parse_values(
         }
 
         // handle undeclared parameters after parsing everything else as a last resort.
-        let param_val = from_float(token.value)?;
-        values.push(Value::Parameter(
-            token.key.fragment().to_string(),
-            param_val,
-        ));
+        if let Ok(param_val) = from_float(token.value) {
+            values.push(Value::Parameter(
+                token.key.fragment().to_string(),
+                param_val,
+            ));
+        }
+        // very last resort, assume undeclared texture (must have extension)
+        else if Path::new(token.value.fragment()).extension().is_some()
+            && ["_mipmap", "_linear", "_wrap_mode", "_repeat_mode"]
+                .iter()
+                .all(|k| !token.key.ends_with(k))
+        {
+            let mut relative_path = path.to_path_buf();
+            relative_path.push(*token.value.fragment());
+            relative_path
+                .canonicalize()
+                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
+            undeclared_textures.push((token.key, relative_path));
+        }
+        // we tried our best
     }
 
+    // Since there are undeclared textures we need to deal with potential mipmap information.
+    for (texture, path) in undeclared_textures {
+        let mipmap = remove_if(&mut rest_tokens, |(_, t)| {
+            t.key.starts_with(*texture)
+                && t.key.ends_with("_mipmap")
+                && t.key.len() == texture.len() + "_mipmap".len()
+        })
+        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
+
+        let linear = remove_if(&mut rest_tokens, |(_, t)| {
+            t.key.starts_with(*texture)
+                && t.key.ends_with("_linear")
+                && t.key.len() == texture.len() + "_linear".len()
+        })
+        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
+
+        let wrap_mode = remove_if(&mut rest_tokens, |(_, t)| {
+            t.key.starts_with(*texture)
+                && (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
+                && (t.key.len() == texture.len() + "_wrap_mode".len()
+                    || t.key.len() == texture.len() + "_repeat_mode".len())
+        })
+        // NOPANIC: infallible
+        .map_or_else(WrapMode::default, |(_, v)| {
+            WrapMode::from_str(&v.value).unwrap()
+        });
+
+        values.push(Value::Texture {
+            name: texture.to_string(),
+            filter_mode: if linear {
+                FilterMode::Linear
+            } else {
+                FilterMode::Nearest
+            },
+            wrap_mode,
+            mipmap,
+            path,
+        })
+    }
+
     // all tokens should be ok to process now.
     Ok(values)


@@ -0,0 +1,22 @@
use glob::glob;
use librashader_presets::ShaderPreset;

#[test]
fn parses_all_slang_presets() {
    for entry in glob("../test/slang-shaders/**/*.slangp").unwrap() {
        if let Ok(path) = entry {
            if let Err(e) = ShaderPreset::try_parse(&path) {
                println!("Could not parse {}: {:?}", path.display(), e)
            }
        }
    }
}

#[test]
fn parses_problematic() {
    for entry in glob("../test/slang-shaders/crt/crt-hyllian-sinc-glow.slangp").unwrap() {
        if let Ok(path) = entry {
            ShaderPreset::try_parse(&path).expect(&format!("Failed to parse {}", path.display()));
        }
    }
}


@@ -33,7 +33,7 @@ mod tests {
     fn triangle_d3d12() {
         let sample = hello_triangle::d3d12_hello_triangle::Sample::new(
             // "../test/slang-shaders/crt/crt-lottes.slangp",
-            "../test/slang-shaders/bezel/Mega_Bezel/Presets/MBZ__0__SMOOTH-ADV.slangp",
+            "../test/slang-shaders/bezel/Mega_Bezel/Presets/Variations/Megatron/ADV/crt-sony-megatron-aeg-CTV-4800-VT-sdr.slangp",
             // "../test/slang-shaders/crt/crt-royale.slangp",
             // "../test/slang-shaders/vhs/VHSPro.slangp",
             &SampleCommandLine {


@@ -49,6 +49,11 @@
 /// Shader presets contain shader and texture parameters, and the order in which to apply a set of
 /// shaders in a filter chain. A librashader runtime takes a resulting [`ShaderPreset`](crate::presets::ShaderPreset)
 /// as input to create a filter chain.
+///
+/// librashader's preset parser has been tested against all presets in the [slang-shaders](https://github.com/libretro/slang-shaders) repository
+/// with generally good compatibility. However, the preset parser requires all referenced paths to resolve to a canonical, existing path relative
+/// to the preset file. The handful of presets that fail to parse for this or other reasons are
+/// listed at [`BROKEN_SHADERS.md`](https://github.com/SnowflakePowered/librashader/blob/master/BROKEN_SHADERS.md).
 pub mod presets {
     use librashader_preprocess::{PreprocessError, ShaderParameter, ShaderSource};
     pub use librashader_presets::*;
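
A minimal usage sketch of the documented entry point; the preset path is illustrative, and the `shaders` field name is assumed from the presets crate:

```rust
use librashader::presets::ShaderPreset;

fn main() {
    // Every path referenced by the preset must resolve relative to the preset file,
    // or try_parse returns a ParsePresetError.
    let preset = ShaderPreset::try_parse("../test/slang-shaders/crt/crt-lottes.slangp")
        .expect("preset should parse");
    println!("{} shader passes", preset.shaders.len()); // field name assumed
}
```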