presets: move slang stuff to slang module

This commit is contained in:
chyyran 2023-02-24 01:01:07 -05:00
parent 34b54b18e1
commit cb0a0920e2
10 changed files with 631 additions and 581 deletions

View file

@ -94,6 +94,10 @@ impl FromStr for WrapMode {
"clamp_to_edge" => WrapMode::ClampToEdge,
"repeat" => WrapMode::Repeat,
"mirrored_repeat" => WrapMode::MirroredRepeat,
// quark
"border" => WrapMode::ClampToBorder,
"edge" => WrapMode::ClampToEdge,
_ => WrapMode::ClampToBorder,
})
}

View file

@ -13,6 +13,11 @@ mod error;
mod parse;
mod preset;
mod quark;
mod slang;
pub use error::*;
pub use preset::*;
/// Remove and return the first element matching `f`, preserving the order of
/// the remaining elements. Returns `None` when nothing matches.
pub(crate) fn remove_if<T>(values: &mut Vec<T>, f: impl FnMut(&T) -> bool) -> Option<T> {
    let index = values.iter().position(f)?;
    Some(values.remove(index))
}

View file

@ -1,27 +1,17 @@
use std::path::Path;
use nom_locate::LocatedSpan;
use std::str;
mod preset;
mod token;
mod value;
pub(crate) type Span<'a> = LocatedSpan<&'a str>;
pub(crate) use token::Token;
pub(crate) use value::Value;
pub(crate) use value::ShaderType;
pub(crate) use value::ShaderStage;
use crate::error::ParsePresetError;
use crate::parse::preset::resolve_values;
use crate::parse::value::parse_preset;
use crate::slang::parse_preset;
use crate::ShaderPreset;
/// Remove and return the first element matching `f`, keeping the relative
/// order of everything else. `None` when no element matches.
pub(crate) fn remove_if<T>(values: &mut Vec<T>, f: impl FnMut(&T) -> bool) -> Option<T> {
    let index = values.iter().position(f)?;
    Some(values.remove(index))
}
impl ShaderPreset {
/// Try to parse the shader preset at the given path.
pub fn try_parse(path: impl AsRef<Path>) -> Result<ShaderPreset, ParsePresetError> {

View file

@ -1,7 +1,7 @@
use librashader_common::ImageFormat;
use crate::parse::{remove_if, ShaderType};
use crate::parse::ShaderType;
use crate::parse::value::Value;
use crate::{ParameterConfig, Scale2D, Scaling, ShaderPassConfig, ShaderPath, ShaderPreset, TextureConfig};
use crate::{ParameterConfig, remove_if, Scale2D, Scaling, ShaderPassConfig, ShaderPath, ShaderPreset, TextureConfig};
pub fn resolve_values(mut values: Vec<Value>) -> ShaderPreset {
let textures: Vec<TextureConfig> = values

View file

@ -1,21 +1,6 @@
use crate::error::{ParseErrorKind, ParsePresetError};
use crate::parse::{remove_if, Span, Token};
use crate::{ScaleFactor, ScaleType};
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::{eof, map_res};
use std::collections::VecDeque;
use nom::IResult;
use num_traits::cast::ToPrimitive;
use crate::parse::token::do_lex;
use librashader_common::{FilterMode, WrapMode};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use librashader_common::{FilterMode, ImageFormat, WrapMode};
use std::path::PathBuf;
#[derive(Debug)]
pub enum ShaderStage {
@ -49,6 +34,7 @@ pub enum Value {
MipmapInput(i32, bool),
Alias(i32, String),
Parameter(String, f32),
FormatOverride(i32, ImageFormat),
Texture {
name: String,
filter_mode: FilterMode,
@ -75,548 +61,8 @@ impl Value {
Value::SrgbFramebuffer(i, _) => Some(*i),
Value::MipmapInput(i, _) => Some(*i),
Value::Alias(i, _) => Some(*i),
Value::FormatOverride(i, _) => Some(*i),
_ => None,
}
}
}
/// Parse a signed integer value, tolerating trailing semicolons and
/// float literals used where an integer index was expected.
fn from_int(input: Span) -> Result<i32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell between
    // semicolon crimes or a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    let int_error = || ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Int,
    };
    if let Ok(value) = i32::from_str(to_parse) {
        return Ok(value);
    }
    // An even more egregious ✨CRIME✨ is using a float as a shader index.
    let as_float = f32::from_str(to_parse).map_err(|_| int_error())?;
    as_float.trunc().to_i32().ok_or_else(int_error)
}
/// Parse an unsigned integer value, tolerating trailing semicolons.
fn from_ul(input: Span) -> Result<u32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell between
    // semicolon crimes or a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    match u32::from_str(to_parse) {
        Ok(value) => Ok(value),
        Err(_) => Err(ParsePresetError::ParserError {
            offset: input.location_offset(),
            row: input.location_line(),
            col: input.get_column(),
            kind: ParseErrorKind::UnsignedInt,
        }),
    }
}
/// Parse a float value, tolerating trailing semicolons.
fn from_float(input: Span) -> Result<f32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell between
    // semicolon crimes or a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    match f32::from_str(to_parse) {
        Ok(value) => Ok(value),
        Err(_) => Err(ParsePresetError::ParserError {
            offset: input.location_offset(),
            row: input.location_line(),
            col: input.get_column(),
            kind: ParseErrorKind::Float,
        }),
    }
}
/// Parse a boolean value. Accepts numeric booleans ("0"/"1") as well as the
/// literal `true`/`false`; any other integer is an error.
fn from_bool(input: Span) -> Result<bool, ParsePresetError> {
    let bool_error = || ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Bool,
    };
    // Numeric form takes precedence over the textual form.
    match i32::from_str(input.trim()) {
        Ok(1) => Ok(true),
        Ok(0) => Ok(false),
        Ok(_) => Err(bool_error()),
        Err(_) => bool::from_str(input.trim()).map_err(|_| bool_error()),
    }
}
/// Match `key` immediately followed by a decimal index and end of input,
/// e.g. `"shader3"` yields `3` for key `"shader"`.
fn parse_indexed_key<'a>(key: &'static str, input: Span<'a>) -> IResult<Span<'a>, i32> {
    let (rest, _prefix) = tag(key)(input)?;
    let (rest, index) = map_res(digit1, from_int)(rest)?;
    let (rest, _end) = eof(rest)?;
    Ok((rest, index))
}
/// Maximum number of `#reference` roots that will be processed before
/// parsing fails with `ExceededReferenceDepth`.
pub const SHADER_MAX_REFERENCE_DEPTH: usize = 16;

/// Load the contents of every preset referenced (transitively) from
/// `root_references`, resolved against `root_path`.
///
/// Returns `(directory, contents)` pairs with deeper references pushed to the
/// front, so parents appear after the children they override.
fn load_child_reference_strings(
    root_references: Vec<PathBuf>,
    root_path: impl AsRef<Path>,
) -> Result<Vec<(PathBuf, String)>, ParsePresetError> {
    let root_path = root_path.as_ref();
    let mut reference_depth = 0;
    let mut reference_strings: VecDeque<(PathBuf, String)> = VecDeque::new();
    let root_references = vec![(root_path.to_path_buf(), root_references)];
    let mut root_references = VecDeque::from(root_references);
    // search needs to be depth first to allow for overrides.
    while let Some((reference_root, referenced_paths)) = root_references.pop_front() {
        if reference_depth > SHADER_MAX_REFERENCE_DEPTH {
            return Err(ParsePresetError::ExceededReferenceDepth);
        }
        // enter the current root
        reference_depth += 1;
        // canonicalize current root
        let reference_root = reference_root
            .canonicalize()
            .map_err(|e| ParsePresetError::IOError(reference_root.to_path_buf(), e))?;
        // resolve all referenced paths against root
        // println!("Resolving {referenced_paths:?} against {reference_root:?}.");
        for path in referenced_paths {
            let mut path = reference_root
                .join(path.clone())
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
            // println!("Opening {:?}", path);
            let mut reference_contents = String::new();
            File::open(&path)
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?
                .read_to_string(&mut reference_contents)
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
            // Lex the child so its own #reference lines can be queued.
            // NOTE(review): `Vec::drain_filter` is a nightly-only std API — confirm
            // the crate is pinned to a toolchain that still provides it.
            let mut new_tokens = do_lex(&reference_contents)?;
            let new_references: Vec<PathBuf> = new_tokens
                .drain_filter(|token| *token.key.fragment() == "#reference")
                .map(|value| PathBuf::from(*value.value.fragment()))
                .collect();
            // Store the *directory* of the referenced preset with its contents,
            // so relative paths inside it can be resolved later.
            path.pop();
            reference_strings.push_front((path.clone(), reference_contents));
            if !new_references.is_empty() {
                root_references.push_front((path, new_references));
            }
        }
    }
    Ok(reference_strings.into())
}
// todo: move this to slang
/// Read, lex, and parse the preset file at `path` into raw `Value`s.
pub fn parse_preset(path: impl AsRef<Path>) -> Result<Vec<Value>, ParsePresetError> {
    let requested = path.as_ref();
    let resolved = requested
        .canonicalize()
        .map_err(|e| ParsePresetError::IOError(requested.to_path_buf(), e))?;
    let mut contents = String::new();
    File::open(&resolved)
        .and_then(|mut f| f.read_to_string(&mut contents))
        .map_err(|e| ParsePresetError::IOError(resolved.to_path_buf(), e))?;
    let tokens = super::token::do_lex(&contents)?;
    parse_values(tokens, resolved)
}
// todo: move this to slang
/// Resolve a lexed token stream into raw preset `Value`s.
///
/// `root_path` must be absolute (it is the preset's own path); if it is not a
/// directory its last component is dropped to obtain the preset directory.
/// `#reference`d child presets are loaded first, so tokens from the root
/// preset take precedence over inherited ones.
pub fn parse_values(
    mut tokens: Vec<Token>,
    root_path: impl AsRef<Path>,
) -> Result<Vec<Value>, ParsePresetError> {
    let mut root_path = root_path.as_ref().to_path_buf();
    if root_path.is_relative() {
        return Err(ParsePresetError::RootPathWasNotAbsolute);
    }
    if !root_path.is_dir() {
        // we don't really care if this doesn't do anything because a non-canonical root path will
        // fail at a later stage during resolution.
        root_path.pop();
    }
    // Pull out #reference lines first; the rest of the root tokens are kept.
    // NOTE(review): `Vec::drain_filter` is a nightly-only std API — confirm toolchain.
    let references: Vec<PathBuf> = tokens
        .drain_filter(|token| *token.key.fragment() == "#reference")
        .map(|value| PathBuf::from(*value.value.fragment()))
        .collect();
    // unfortunately we need to lex twice because there's no way to know the references ahead of time.
    let child_strings = load_child_reference_strings(references, &root_path)?;
    let mut all_tokens: Vec<(&Path, Vec<Token>)> = Vec::new();
    for (path, string) in child_strings.iter() {
        // lex the child tokens
        let mut tokens = do_lex(string.as_ref())?;
        tokens.retain(|token| *token.key.fragment() != "#reference");
        all_tokens.push((path.as_path(), tokens))
    }
    // load depth first, so all child tokens are first.
    // Later tokens take precedence.
    all_tokens.push((root_path.as_path(), tokens));
    // collect all possible parameter names.
    let mut parameter_names: Vec<&str> = Vec::new();
    for (_, tokens) in all_tokens.iter_mut() {
        for token in tokens.drain_filter(|token| *token.key.fragment() == "parameters") {
            let parameter_name_string: &str = token.value.fragment();
            for parameter_name in parameter_name_string.split(';') {
                parameter_names.push(parameter_name);
            }
        }
    }
    // collect all possible texture names.
    let mut texture_names: Vec<&str> = Vec::new();
    for (_, tokens) in all_tokens.iter_mut() {
        for token in tokens.drain_filter(|token| *token.key.fragment() == "textures") {
            let texture_name_string: &str = token.value.fragment();
            for texture_name in texture_name_string.split(';') {
                texture_names.push(texture_name);
            }
        }
    }
    let mut values = Vec::new();
    // resolve shader paths.
    for (path, tokens) in all_tokens.iter_mut() {
        for token in tokens.drain_filter(|token| parse_indexed_key("shader", token.key).is_ok()) {
            // Re-parse to recover the numeric index; map nom's error into ours.
            let (_, index) = parse_indexed_key("shader", token.key).map_err(|e| match e {
                nom::Err::Error(e) | nom::Err::Failure(e) => {
                    let input: Span = e.input;
                    ParsePresetError::ParserError {
                        offset: input.location_offset(),
                        row: input.location_line(),
                        col: input.get_column(),
                        kind: ParseErrorKind::Index("shader"),
                    }
                }
                _ => ParsePresetError::ParserError {
                    offset: 0,
                    row: 0,
                    col: 0,
                    kind: ParseErrorKind::Index("shader"),
                },
            })?;
            let mut relative_path = path.to_path_buf();
            relative_path.push(*token.value.fragment());
            // Canonicalize purely as an existence/validity check; the joined
            // (non-canonical) path is what gets stored.
            relative_path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
            values.push(Value::Shader(index, ShaderType::Slang, relative_path))
        }
    }
    // resolve texture paths
    let mut textures = Vec::new();
    for (path, tokens) in all_tokens.iter_mut() {
        for token in tokens.drain_filter(|token| texture_names.contains(token.key.fragment())) {
            let mut relative_path = path.to_path_buf();
            relative_path.push(*token.value.fragment());
            relative_path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
            textures.push((token.key, relative_path))
        }
    }
    // Flatten the remaining tokens, keeping the directory each came from.
    let mut tokens: Vec<(&Path, Token)> = all_tokens
        .into_iter()
        .flat_map(|(p, token)| token.into_iter().map(move |t| (p, t)))
        .collect();
    // Consume the per-texture option keys (<name>_mipmap, <name>_linear, ...)
    // for each declared texture.
    for (texture, path) in textures {
        let mipmap = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_mipmap")
                && t.key.len() == texture.len() + "_mipmap".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
        let linear = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_linear")
                && t.key.len() == texture.len() + "_linear".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
        let wrap_mode = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
                && (t.key.len() == texture.len() + "_wrap_mode".len()
                    || t.key.len() == texture.len() + "_repeat_mode".len())
        })
        // NOPANIC: infallible
        .map_or_else(WrapMode::default, |(_, v)| {
            WrapMode::from_str(&v.value).unwrap()
        });
        // This really isn't supported but crt-torridgristle uses this syntax.
        // Again, don't know how this works in RA but RA's parser isn't as strict as ours.
        let filter = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with("filter_")
                && t.key.ends_with(*texture)
                && t.key.len() == "filter_".len() + texture.len()
        })
        // NOPANIC: infallible
        .map_or(None, |(_, v)| Some(FilterMode::from_str(&v.value).unwrap()));
        values.push(Value::Texture {
            name: texture.to_string(),
            // An explicit filter_<name> wins; otherwise fall back to <name>_linear.
            filter_mode: filter.unwrap_or(if linear {
                FilterMode::Linear
            } else {
                FilterMode::Nearest
            }),
            wrap_mode,
            mipmap,
            path,
        })
    }
    let mut rest_tokens = Vec::new();
    // hopefully no more textures left in the token tree
    for (p, token) in tokens {
        if parameter_names.contains(token.key.fragment()) {
            let param_val = from_float(token.value)
                // This is literally just to work around BEAM_PROFILE in crt-hyllian-sinc-glow.slangp
                // which has ""0'.000000". This somehow works in RA because it defaults to 0, probably.
                // This hack is only used for **known** parameter names. If we tried this for undeclared
                // params (god help me), it would be pretty bad because we lose texture path fallback.
                .unwrap_or(0.0);
            values.push(Value::Parameter(
                token.key.fragment().to_string(),
                param_val,
            ));
            continue;
        }
        if token.key.fragment() == &"shaders" {
            let shader_count = from_int(token.value)?;
            values.push(Value::ShaderCount(shader_count));
            continue;
        }
        if token.key.fragment() == &"feedback_pass" {
            let feedback_pass = from_int(token.value)?;
            values.push(Value::FeedbackPass(feedback_pass));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("filter_linear", token.key) {
            let linear = from_bool(token.value)?;
            values.push(Value::FilterMode(
                idx,
                if linear {
                    FilterMode::Linear
                } else {
                    FilterMode::Nearest
                },
            ));
            continue;
        }
        // NOTE(review): these `WrapMode::from_str(...).unwrap()` calls rely on
        // WrapMode's FromStr being infallible — TODO confirm it never errors.
        if let Ok((_, idx)) = parse_indexed_key("wrap_mode", token.key) {
            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
            values.push(Value::WrapMode(idx, wrap_mode));
            continue;
        }
        // crt-geom uses repeat_mode...
        if let Ok((_, idx)) = parse_indexed_key("repeat_mode", token.key) {
            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
            values.push(Value::WrapMode(idx, wrap_mode));
            continue;
        }
        // crt-royale uses 'texture_wrap_mode' instead of 'wrap_mode', I have no idea
        // how this possibly could work in RA, but here it is..
        if let Ok((_, idx)) = parse_indexed_key("texture_wrap_mode", token.key) {
            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
            values.push(Value::WrapMode(idx, wrap_mode));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("frame_count_mod", token.key) {
            let frame_count_mod = from_ul(token.value)?;
            values.push(Value::FrameCountMod(idx, frame_count_mod));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("srgb_framebuffer", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::SrgbFramebuffer(idx, enabled));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("float_framebuffer", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::FloatFramebuffer(idx, enabled));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("mipmap_input", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::MipmapInput(idx, enabled));
            continue;
        }
        // vector-glow-alt-render.slangp uses "mipmap" for pass 1, but "mipmap_input" for everything else.
        if let Ok((_, idx)) = parse_indexed_key("mipmap", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::MipmapInput(idx, enabled));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("alias", token.key) {
            values.push(Value::Alias(idx, token.value.to_string()));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_type", token.key) {
            let scale_type = ScaleType::from_str(token.value.trim())?;
            values.push(Value::ScaleType(idx, scale_type));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_type_x", token.key) {
            let scale_type = ScaleType::from_str(token.value.trim())?;
            values.push(Value::ScaleTypeX(idx, scale_type));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_type_y", token.key) {
            let scale_type = ScaleType::from_str(token.value.trim())?;
            values.push(Value::ScaleTypeY(idx, scale_type));
            continue;
        }
        // Anything unrecognized is deferred; scale values need the scale types
        // above to be resolved first.
        rest_tokens.push((p, token))
    }
    let mut undeclared_textures = Vec::new();
    for (path, token) in &rest_tokens {
        if let Ok((_, idx)) = parse_indexed_key("scale", token.key) {
            // Absolute scale types carry integer sizes; everything else is a float factor.
            let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                let scale = from_int(token.value)?;
                ScaleFactor::Absolute(scale)
            } else {
                let scale = from_float(token.value)?;
                ScaleFactor::Float(scale)
            };
            values.push(Value::Scale(idx, scale));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_x", token.key) {
            let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) | Value::ScaleTypeX(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                let scale = from_int(token.value)?;
                ScaleFactor::Absolute(scale)
            } else {
                let scale = from_float(token.value)?;
                ScaleFactor::Float(scale)
            };
            values.push(Value::ScaleX(idx, scale));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_y", token.key) {
            let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) | Value::ScaleTypeY(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                let scale = from_int(token.value)?;
                ScaleFactor::Absolute(scale)
            } else {
                let scale = from_float(token.value)?;
                ScaleFactor::Float(scale)
            };
            values.push(Value::ScaleY(idx, scale));
            continue;
        }
        // handle undeclared parameters after parsing everything else as a last resort.
        if let Ok(param_val) = from_float(token.value) {
            values.push(Value::Parameter(
                token.key.fragment().to_string(),
                param_val,
            ));
        }
        // very last resort, assume undeclared texture (must have extension)
        else if Path::new(token.value.fragment()).extension().is_some()
            && ["_mipmap", "_linear", "_wrap_mode", "_repeat_mode"]
                .iter()
                .all(|k| !token.key.ends_with(k))
        {
            let mut relative_path = path.to_path_buf();
            relative_path.push(*token.value.fragment());
            relative_path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
            undeclared_textures.push((token.key, relative_path));
        }
        // we tried our best
    }
    // Since there are undeclared textures we need to deal with potential mipmap information.
    for (texture, path) in undeclared_textures {
        let mipmap = remove_if(&mut rest_tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_mipmap")
                && t.key.len() == texture.len() + "_mipmap".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
        let linear = remove_if(&mut rest_tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_linear")
                && t.key.len() == texture.len() + "_linear".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
        let wrap_mode = remove_if(&mut rest_tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
                && (t.key.len() == texture.len() + "_wrap_mode".len()
                    || t.key.len() == texture.len() + "_repeat_mode".len())
        })
        // NOPANIC: infallible
        .map_or_else(WrapMode::default, |(_, v)| {
            WrapMode::from_str(&v.value).unwrap()
        });
        values.push(Value::Texture {
            name: texture.to_string(),
            filter_mode: if linear {
                FilterMode::Linear
            } else {
                FilterMode::Nearest
            },
            wrap_mode,
            mipmap,
            path,
        })
    }
    // all tokens should be ok to process now.
    Ok(values)
}
#[cfg(test)]
mod test {
    use crate::parse::value::parse_preset;
    use std::path::PathBuf;

    /// Smoke test: parses a known Mega Bezel preset from the test shader tree.
    /// Requires the slang-shaders test assets to be checked out alongside the repo.
    #[test]
    pub fn parse_basic() {
        let root =
            PathBuf::from("../test/slang-shaders/bezel/Mega_Bezel/Presets/Base_CRT_Presets/MBZ__3__STD__MEGATRON-NTSC.slangp");
        let basic = parse_preset(root);
        eprintln!("{basic:?}");
        assert!(basic.is_ok());
    }
}

View file

@ -1,12 +1,11 @@
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use bml::BmlNode;
use librashader_common::FilterMode;
use librashader_common::{FilterMode, ImageFormat, WrapMode};
use crate::parse::{ShaderStage, ShaderType, Value};
use crate::ParsePresetError;
use crate::{ParseErrorKind, ParsePresetError};
fn parse_bml_node(path: impl AsRef<Path>) -> Result<BmlNode, ParsePresetError> {
let path = path.as_ref();
@ -34,15 +33,58 @@ fn parse_values(node: &BmlNode) -> Result<Vec<Value>, ParsePresetError>{
// NOPANIC: infallible
values.push(Value::FilterMode(index as i32, FilterMode::from_str(filter.value().trim()).unwrap()))
}
if let Some(wrap) = programs.named("wrap").next() {
values.push(Value::WrapMode(index as i32, WrapMode::from_str(wrap.value().trim()).unwrap()))
}
if let Some(format) = programs.named("format").next() {
let format = match format.value() {
"rgba8" => ImageFormat::R8G8B8A8Unorm,
"rgb10a2" => ImageFormat::A2B10G10R10UnormPack32,
"rgba16" => ImageFormat::R16G16B16A16Sint,
"rgba16f" => ImageFormat::R16G16B16A16Sfloat,
"rgba32f" => ImageFormat::R32G32B32A32Sfloat,
// srgb extension
"srgb8" => ImageFormat::R8G8B8A8Srgb,
// don't support rgba12
_ => ImageFormat::Unknown,
};
values.push(Value::FormatOverride(index as i32, format));
}
if let Some(modulo) = programs.named("modulo").next() {
let modulo = u32::from_str(modulo.value())
.map_err(|_| ParsePresetError::ParserError {
offset: index,
row: 0,
col: 0,
kind: ParseErrorKind::UnsignedInt,
})?;
values.push(Value::FrameCountMod(index as i32, modulo))
}
if let Some(vertex) = programs.named("vertex").next() {
values.push(Value::Shader(index as i32, ShaderType::Quark(ShaderStage::Vertex), PathBuf::from_str(vertex.value().trim())
.expect("Infallible")))
let path = PathBuf::from_str(vertex.value().trim())
.expect("Infallible");
let path = path.canonicalize()
.map_err(|e| ParsePresetError::IOError(path.to_path_buf(), e))?;
values.push(Value::Shader(index as i32, ShaderType::Quark(ShaderStage::Vertex), path))
}
if let Some(fragment) = programs.named("fragment").next() {
values.push(Value::Shader(index as i32, ShaderType::Quark(ShaderStage::Fragment), PathBuf::from_str(fragment.value().trim())
.expect("Infallible")))
let path = PathBuf::from_str(fragment.value().trim())
.expect("Infallible");
let path = path.canonicalize()
.map_err(|e| ParsePresetError::IOError(path.to_path_buf(), e))?;
values.push(Value::Shader(index as i32, ShaderType::Quark(ShaderStage::Fragment), path))
}
}

View file

@ -0,0 +1,9 @@
mod parse;
mod token;

use nom_locate::LocatedSpan;

pub use parse::{parse_preset, parse_values};

/// Source span type used throughout the slang preset lexer and parser.
pub(crate) type Span<'a> = LocatedSpan<&'a str>;

View file

@ -0,0 +1,554 @@
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::{eof, map_res};
use std::path::{Path, PathBuf};
use std::fs::File;
use librashader_common::{FilterMode, WrapMode};
use std::collections::VecDeque;
use std::str::FromStr;
use std::io::Read;
use nom::IResult;
use num_traits::ToPrimitive;
use crate::{ParseErrorKind, ParsePresetError, remove_if, ScaleFactor, ScaleType};
use crate::parse::{ShaderType, Value};
use crate::slang::Span;
use crate::slang::token::{do_lex, Token};
/// Parse a signed integer value, tolerating trailing semicolons and
/// float literals used where an integer index was expected.
fn from_int(input: Span) -> Result<i32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell between
    // semicolon crimes or a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    let int_error = || ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Int,
    };
    if let Ok(value) = i32::from_str(to_parse) {
        return Ok(value);
    }
    // An even more egregious ✨CRIME✨ is using a float as a shader index.
    let as_float = f32::from_str(to_parse).map_err(|_| int_error())?;
    as_float.trunc().to_i32().ok_or_else(int_error)
}
/// Parse an unsigned integer value, tolerating trailing semicolons.
fn from_ul(input: Span) -> Result<u32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell between
    // semicolon crimes or a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    match u32::from_str(to_parse) {
        Ok(value) => Ok(value),
        Err(_) => Err(ParsePresetError::ParserError {
            offset: input.location_offset(),
            row: input.location_line(),
            col: input.get_column(),
            kind: ParseErrorKind::UnsignedInt,
        }),
    }
}
/// Parse a float value, tolerating trailing semicolons.
fn from_float(input: Span) -> Result<f32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell between
    // semicolon crimes or a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    match f32::from_str(to_parse) {
        Ok(value) => Ok(value),
        Err(_) => Err(ParsePresetError::ParserError {
            offset: input.location_offset(),
            row: input.location_line(),
            col: input.get_column(),
            kind: ParseErrorKind::Float,
        }),
    }
}
/// Parse a boolean value. Accepts numeric booleans ("0"/"1") as well as the
/// literal `true`/`false`; any other integer is an error.
fn from_bool(input: Span) -> Result<bool, ParsePresetError> {
    let bool_error = || ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Bool,
    };
    // Numeric form takes precedence over the textual form.
    match i32::from_str(input.trim()) {
        Ok(1) => Ok(true),
        Ok(0) => Ok(false),
        Ok(_) => Err(bool_error()),
        Err(_) => bool::from_str(input.trim()).map_err(|_| bool_error()),
    }
}
/// Match `key` immediately followed by a decimal index and end of input,
/// e.g. `"shader3"` yields `3` for key `"shader"`.
fn parse_indexed_key<'a>(key: &'static str, input: Span<'a>) -> IResult<Span<'a>, i32> {
    let (rest, _prefix) = tag(key)(input)?;
    let (rest, index) = map_res(digit1, from_int)(rest)?;
    let (rest, _end) = eof(rest)?;
    Ok((rest, index))
}
/// Maximum number of `#reference` roots that will be processed before
/// parsing fails with `ExceededReferenceDepth`.
pub const SHADER_MAX_REFERENCE_DEPTH: usize = 16;

/// Load the contents of every preset referenced (transitively) from
/// `root_references`, resolved against `root_path`.
///
/// Returns `(directory, contents)` pairs with deeper references pushed to the
/// front, so parents appear after the children they override.
fn load_child_reference_strings(
    root_references: Vec<PathBuf>,
    root_path: impl AsRef<Path>,
) -> Result<Vec<(PathBuf, String)>, ParsePresetError> {
    let root_path = root_path.as_ref();
    let mut reference_depth = 0;
    let mut reference_strings: VecDeque<(PathBuf, String)> = VecDeque::new();
    let root_references = vec![(root_path.to_path_buf(), root_references)];
    let mut root_references = VecDeque::from(root_references);
    // search needs to be depth first to allow for overrides.
    while let Some((reference_root, referenced_paths)) = root_references.pop_front() {
        if reference_depth > SHADER_MAX_REFERENCE_DEPTH {
            return Err(ParsePresetError::ExceededReferenceDepth);
        }
        // enter the current root
        reference_depth += 1;
        // canonicalize current root
        let reference_root = reference_root
            .canonicalize()
            .map_err(|e| ParsePresetError::IOError(reference_root.to_path_buf(), e))?;
        // resolve all referenced paths against root
        // println!("Resolving {referenced_paths:?} against {reference_root:?}.");
        for path in referenced_paths {
            let mut path = reference_root
                .join(path.clone())
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
            // println!("Opening {:?}", path);
            let mut reference_contents = String::new();
            File::open(&path)
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?
                .read_to_string(&mut reference_contents)
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
            // Lex the child so its own #reference lines can be queued.
            // NOTE(review): `Vec::drain_filter` is a nightly-only std API — confirm
            // the crate is pinned to a toolchain that still provides it.
            let mut new_tokens = do_lex(&reference_contents)?;
            let new_references: Vec<PathBuf> = new_tokens
                .drain_filter(|token| *token.key.fragment() == "#reference")
                .map(|value| PathBuf::from(*value.value.fragment()))
                .collect();
            // Store the *directory* of the referenced preset with its contents,
            // so relative paths inside it can be resolved later.
            path.pop();
            reference_strings.push_front((path.clone(), reference_contents));
            if !new_references.is_empty() {
                root_references.push_front((path, new_references));
            }
        }
    }
    Ok(reference_strings.into())
}
/// Read, lex, and parse the preset file at `path` into raw `Value`s.
pub fn parse_preset(path: impl AsRef<Path>) -> Result<Vec<Value>, ParsePresetError> {
    let requested = path.as_ref();
    let resolved = requested
        .canonicalize()
        .map_err(|e| ParsePresetError::IOError(requested.to_path_buf(), e))?;
    let mut contents = String::new();
    File::open(&resolved)
        .and_then(|mut f| f.read_to_string(&mut contents))
        .map_err(|e| ParsePresetError::IOError(resolved.to_path_buf(), e))?;
    let tokens = do_lex(&contents)?;
    parse_values(tokens, resolved)
}
pub fn parse_values(
mut tokens: Vec<Token>,
root_path: impl AsRef<Path>,
) -> Result<Vec<Value>, ParsePresetError> {
let mut root_path = root_path.as_ref().to_path_buf();
if root_path.is_relative() {
return Err(ParsePresetError::RootPathWasNotAbsolute);
}
if !root_path.is_dir() {
// we don't really care if this doesn't do anything because a non-canonical root path will
// fail at a later stage during resolution.
root_path.pop();
}
let references: Vec<PathBuf> = tokens
.drain_filter(|token| *token.key.fragment() == "#reference")
.map(|value| PathBuf::from(*value.value.fragment()))
.collect();
// unfortunately we need to lex twice because there's no way to know the references ahead of time.
let child_strings = load_child_reference_strings(references, &root_path)?;
let mut all_tokens: Vec<(&Path, Vec<Token>)> = Vec::new();
for (path, string) in child_strings.iter() {
// lex the child tokens
let mut tokens = do_lex(string.as_ref())?;
tokens.retain(|token| *token.key.fragment() != "#reference");
all_tokens.push((path.as_path(), tokens))
}
// load depth first, so all child tokens are first.
// Later tokens take precedence.
all_tokens.push((root_path.as_path(), tokens));
// collect all possible parameter names.
let mut parameter_names: Vec<&str> = Vec::new();
for (_, tokens) in all_tokens.iter_mut() {
for token in tokens.drain_filter(|token| *token.key.fragment() == "parameters") {
let parameter_name_string: &str = token.value.fragment();
for parameter_name in parameter_name_string.split(';') {
parameter_names.push(parameter_name);
}
}
}
// collect all possible texture names.
let mut texture_names: Vec<&str> = Vec::new();
for (_, tokens) in all_tokens.iter_mut() {
for token in tokens.drain_filter(|token| *token.key.fragment() == "textures") {
let texture_name_string: &str = token.value.fragment();
for texture_name in texture_name_string.split(';') {
texture_names.push(texture_name);
}
}
}
let mut values = Vec::new();
// resolve shader paths.
for (path, tokens) in all_tokens.iter_mut() {
for token in tokens.drain_filter(|token| parse_indexed_key("shader", token.key).is_ok()) {
let (_, index) = parse_indexed_key("shader", token.key).map_err(|e| match e {
nom::Err::Error(e) | nom::Err::Failure(e) => {
let input: Span = e.input;
ParsePresetError::ParserError {
offset: input.location_offset(),
row: input.location_line(),
col: input.get_column(),
kind: ParseErrorKind::Index("shader"),
}
}
_ => ParsePresetError::ParserError {
offset: 0,
row: 0,
col: 0,
kind: ParseErrorKind::Index("shader"),
},
})?;
let mut relative_path = path.to_path_buf();
relative_path.push(*token.value.fragment());
relative_path
.canonicalize()
.map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
values.push(Value::Shader(index, ShaderType::Slang, relative_path))
}
}
// resolve texture paths
let mut textures = Vec::new();
for (path, tokens) in all_tokens.iter_mut() {
for token in tokens.drain_filter(|token| texture_names.contains(token.key.fragment())) {
let mut relative_path = path.to_path_buf();
relative_path.push(*token.value.fragment());
relative_path
.canonicalize()
.map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
textures.push((token.key, relative_path))
}
}
let mut tokens: Vec<(&Path, Token)> = all_tokens
.into_iter()
.flat_map(|(p, token)| token.into_iter().map(move |t| (p, t)))
.collect();
for (texture, path) in textures {
let mipmap = remove_if(&mut tokens, |(_, t)| {
t.key.starts_with(*texture)
&& t.key.ends_with("_mipmap")
&& t.key.len() == texture.len() + "_mipmap".len()
})
.map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
let linear = remove_if(&mut tokens, |(_, t)| {
t.key.starts_with(*texture)
&& t.key.ends_with("_linear")
&& t.key.len() == texture.len() + "_linear".len()
})
.map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
let wrap_mode = remove_if(&mut tokens, |(_, t)| {
t.key.starts_with(*texture)
&& (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
&& (t.key.len() == texture.len() + "_wrap_mode".len()
|| t.key.len() == texture.len() + "_repeat_mode".len())
})
// NOPANIC: infallible
.map_or_else(WrapMode::default, |(_, v)| {
WrapMode::from_str(&v.value).unwrap()
});
// This really isn't supported but crt-torridgristle uses this syntax.
// Again, don't know how this works in RA but RA's parser isn't as strict as ours.
let filter = remove_if(&mut tokens, |(_, t)| {
t.key.starts_with("filter_")
&& t.key.ends_with(*texture)
&& t.key.len() == "filter_".len() + texture.len()
})
// NOPANIC: infallible
.map_or(None, |(_, v)| Some(FilterMode::from_str(&v.value).unwrap()));
values.push(Value::Texture {
name: texture.to_string(),
filter_mode: filter.unwrap_or(if linear {
FilterMode::Linear
} else {
FilterMode::Nearest
}),
wrap_mode,
mipmap,
path,
})
}
let mut rest_tokens = Vec::new();
// hopefully no more textures left in the token tree
for (p, token) in tokens {
if parameter_names.contains(token.key.fragment()) {
let param_val = from_float(token.value)
// This is literally just to work around BEAM_PROFILE in crt-hyllian-sinc-glow.slangp
// which has ""0'.000000". This somehow works in RA because it defaults to 0, probably.
// This hack is only used for **known** parameter names. If we tried this for undeclared
// params (god help me), it would be pretty bad because we lose texture path fallback.
.unwrap_or(0.0);
values.push(Value::Parameter(
token.key.fragment().to_string(),
param_val,
));
continue;
}
if token.key.fragment() == &"shaders" {
let shader_count = from_int(token.value)?;
values.push(Value::ShaderCount(shader_count));
continue;
}
if token.key.fragment() == &"feedback_pass" {
let feedback_pass = from_int(token.value)?;
values.push(Value::FeedbackPass(feedback_pass));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("filter_linear", token.key) {
let linear = from_bool(token.value)?;
values.push(Value::FilterMode(
idx,
if linear {
FilterMode::Linear
} else {
FilterMode::Nearest
},
));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("wrap_mode", token.key) {
let wrap_mode = WrapMode::from_str(&token.value).unwrap();
values.push(Value::WrapMode(idx, wrap_mode));
continue;
}
// crt-geom uses repeat_mode...
if let Ok((_, idx)) = parse_indexed_key("repeat_mode", token.key) {
let wrap_mode = WrapMode::from_str(&token.value).unwrap();
values.push(Value::WrapMode(idx, wrap_mode));
continue;
}
// crt-royale uses 'texture_wrap_mode' instead of 'wrap_mode', I have no idea
// how this possibly could work in RA, but here it is..
if let Ok((_, idx)) = parse_indexed_key("texture_wrap_mode", token.key) {
let wrap_mode = WrapMode::from_str(&token.value).unwrap();
values.push(Value::WrapMode(idx, wrap_mode));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("frame_count_mod", token.key) {
let frame_count_mod = from_ul(token.value)?;
values.push(Value::FrameCountMod(idx, frame_count_mod));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("srgb_framebuffer", token.key) {
let enabled = from_bool(token.value)?;
values.push(Value::SrgbFramebuffer(idx, enabled));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("float_framebuffer", token.key) {
let enabled = from_bool(token.value)?;
values.push(Value::FloatFramebuffer(idx, enabled));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("mipmap_input", token.key) {
let enabled = from_bool(token.value)?;
values.push(Value::MipmapInput(idx, enabled));
continue;
}
// vector-glow-alt-render.slangp uses "mipmap" for pass 1, but "mipmap_input" for everything else.
if let Ok((_, idx)) = parse_indexed_key("mipmap", token.key) {
let enabled = from_bool(token.value)?;
values.push(Value::MipmapInput(idx, enabled));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("alias", token.key) {
values.push(Value::Alias(idx, token.value.to_string()));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("scale_type", token.key) {
let scale_type = ScaleType::from_str(token.value.trim())?;
values.push(Value::ScaleType(idx, scale_type));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("scale_type_x", token.key) {
let scale_type = ScaleType::from_str(token.value.trim())?;
values.push(Value::ScaleTypeX(idx, scale_type));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("scale_type_y", token.key) {
let scale_type = ScaleType::from_str(token.value.trim())?;
values.push(Value::ScaleTypeY(idx, scale_type));
continue;
}
rest_tokens.push((p, token))
}
let mut undeclared_textures = Vec::new();
for (path, token) in &rest_tokens {
if let Ok((_, idx)) = parse_indexed_key("scale", token.key) {
let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) if match_idx == idx)) {
let scale = from_int(token.value)?;
ScaleFactor::Absolute(scale)
} else {
let scale = from_float(token.value)?;
ScaleFactor::Float(scale)
};
values.push(Value::Scale(idx, scale));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("scale_x", token.key) {
let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) | Value::ScaleTypeX(match_idx, ScaleType::Absolute) if match_idx == idx)) {
let scale = from_int(token.value)?;
ScaleFactor::Absolute(scale)
} else {
let scale = from_float(token.value)?;
ScaleFactor::Float(scale)
};
values.push(Value::ScaleX(idx, scale));
continue;
}
if let Ok((_, idx)) = parse_indexed_key("scale_y", token.key) {
let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) | Value::ScaleTypeY(match_idx, ScaleType::Absolute) if match_idx == idx)) {
let scale = from_int(token.value)?;
ScaleFactor::Absolute(scale)
} else {
let scale = from_float(token.value)?;
ScaleFactor::Float(scale)
};
values.push(Value::ScaleY(idx, scale));
continue;
}
// handle undeclared parameters after parsing everything else as a last resort.
if let Ok(param_val) = from_float(token.value) {
values.push(Value::Parameter(
token.key.fragment().to_string(),
param_val,
));
}
// very last resort, assume undeclared texture (must have extension)
else if Path::new(token.value.fragment()).extension().is_some()
&& ["_mipmap", "_linear", "_wrap_mode", "_repeat_mode"]
.iter()
.all(|k| !token.key.ends_with(k))
{
let mut relative_path = path.to_path_buf();
relative_path.push(*token.value.fragment());
relative_path
.canonicalize()
.map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
undeclared_textures.push((token.key, relative_path));
}
// we tried our best
}
// Since there are undeclared textures we need to deal with potential mipmap information.
for (texture, path) in undeclared_textures {
let mipmap = remove_if(&mut rest_tokens, |(_, t)| {
t.key.starts_with(*texture)
&& t.key.ends_with("_mipmap")
&& t.key.len() == texture.len() + "_mipmap".len()
})
.map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
let linear = remove_if(&mut rest_tokens, |(_, t)| {
t.key.starts_with(*texture)
&& t.key.ends_with("_linear")
&& t.key.len() == texture.len() + "_linear".len()
})
.map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;
let wrap_mode = remove_if(&mut rest_tokens, |(_, t)| {
t.key.starts_with(*texture)
&& (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
&& (t.key.len() == texture.len() + "_wrap_mode".len()
|| t.key.len() == texture.len() + "_repeat_mode".len())
})
// NOPANIC: infallible
.map_or_else(WrapMode::default, |(_, v)| {
WrapMode::from_str(&v.value).unwrap()
});
values.push(Value::Texture {
name: texture.to_string(),
filter_mode: if linear {
FilterMode::Linear
} else {
FilterMode::Nearest
},
wrap_mode,
mipmap,
path,
})
}
// all tokens should be ok to process now.
Ok(values)
}
#[cfg(test)]
mod test {
    use crate::slang::parse_preset;
    use std::path::PathBuf;

    /// Smoke test: a known-good Mega Bezel preset from the test fixtures
    /// should parse without error end-to-end.
    #[test]
    pub fn parse_basic() {
        let preset_path =
            PathBuf::from("../test/slang-shaders/bezel/Mega_Bezel/Presets/Base_CRT_Presets/MBZ__3__STD__MEGATRON-NTSC.slangp");

        // Parse and dump the result for debugging before asserting success,
        // so a failure still shows what was produced.
        let result = parse_preset(preset_path);
        eprintln!("{:?}", result);
        assert!(result.is_ok());
    }
}

View file

@ -1,5 +1,4 @@
use crate::error::ParsePresetError;
use crate::parse::Span;
use nom::branch::alt;
use nom::bytes::complete::{is_not, take_until};
use nom::character::complete::{char, line_ending, multispace1, not_line_ending};
@ -12,6 +11,7 @@ use nom::{
bytes::complete::tag, character::complete::multispace0, IResult, InputIter, InputLength,
InputTake,
};
use crate::slang::Span;
#[derive(Debug)]
pub struct Token<'a> {
@ -155,7 +155,7 @@ pub fn do_lex(input: &str) -> Result<Vec<Token>, ParsePresetError> {
#[cfg(test)]
mod test {
use crate::parse::token::{do_lex, single_comment};
use crate::slang::token::{do_lex, single_comment};
#[test]
fn parses_single_line_comment() {

@ -1 +1 @@
Subproject commit a6e11453ad8c62931c62eeb79d51c70887b40bba
Subproject commit e04aa575febae5927db6b445a3c47b8f4221df78