use crate::error::{ParseErrorKind, ParsePresetError};
use crate::parse::{remove_if, Span, Token};
use crate::{ScaleFactor, ScaleType};
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::{eof, map_res};
use std::collections::VecDeque;

use nom::IResult;
use num_traits::cast::ToPrimitive;

use crate::parse::token::do_lex;
use librashader_common::{FilterMode, WrapMode};
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use librashader_common::map::FastHashMap;

use crate::context::{apply_context, WildcardContext};
use crate::extract_if::MakeExtractIf;

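/// A single value parsed out of a shader preset file.
///
/// Indexed variants carry the shader pass index they apply to; `Parameter` and
/// `Texture` are identified by name instead.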
#[derive(Debug)]
pub enum Value {
    ShaderCount(i32),
    FeedbackPass(i32),
    Shader(i32, PathBuf),
    ScaleX(i32, ScaleFactor),
    ScaleY(i32, ScaleFactor),
    Scale(i32, ScaleFactor),
    ScaleType(i32, ScaleType),
    ScaleTypeX(i32, ScaleType),
    ScaleTypeY(i32, ScaleType),
    FilterMode(i32, FilterMode),
    WrapMode(i32, WrapMode),
    FrameCountMod(i32, u32),
    FloatFramebuffer(i32, bool),
    SrgbFramebuffer(i32, bool),
    MipmapInput(i32, bool),
    Alias(i32, String),
    Parameter(String, f32),
    Texture {
        name: String,
        filter_mode: FilterMode,
        wrap_mode: WrapMode,
        mipmap: bool,
        path: PathBuf,
    },
}

impl Value {
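    /// Returns the shader pass index this value applies to, or `None` for
    /// values that are not scoped to a single pass (parameters and textures).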
    pub(crate) fn shader_index(&self) -> Option<i32> {
        match self {
            Value::Shader(i, _) => Some(*i),
            Value::ScaleX(i, _) => Some(*i),
            Value::ScaleY(i, _) => Some(*i),
            Value::Scale(i, _) => Some(*i),
            Value::ScaleType(i, _) => Some(*i),
            Value::ScaleTypeX(i, _) => Some(*i),
            Value::ScaleTypeY(i, _) => Some(*i),
            Value::FilterMode(i, _) => Some(*i),
            Value::WrapMode(i, _) => Some(*i),
            Value::FrameCountMod(i, _) => Some(*i),
            Value::FloatFramebuffer(i, _) => Some(*i),
            Value::SrgbFramebuffer(i, _) => Some(*i),
            Value::MipmapInput(i, _) => Some(*i),
            Value::Alias(i, _) => Some(*i),
            _ => None,
        }
    }
}

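/// Parses an `i32` from a span, tolerating a trailing `;` as well as
/// float-formatted values (which are truncated), both of which appear in
/// presets in the wild.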
fn from_int(input: Span) -> Result<i32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell the difference
    // between semicolon crimes and a valid param/texture name listing.
    let to_parse = input.trim().trim_end_matches(";");
    i32::from_str(to_parse)
        .map_err(|_| ParsePresetError::ParserError {
            offset: input.location_offset(),
            row: input.location_line(),
            col: input.get_column(),
            kind: ParseErrorKind::Int,
        })
        .or_else(|e| {
            // An even more egregious ✨CRIME✨ is using a float as a shader index.
            let result = f32::from_str(to_parse).map_err(|_| e)?;
            let result = result
                .trunc()
                .to_i32()
                .ok_or(ParsePresetError::ParserError {
                    offset: input.location_offset(),
                    row: input.location_line(),
                    col: input.get_column(),
                    kind: ParseErrorKind::Int,
                })?;
            Ok(result)
        })
}

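/// Parses a `u32` from a span, tolerating a trailing `;`.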
fn from_ul(input: Span) -> Result<u32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell the difference
    // between semicolon crimes and a valid param/texture name listing.
    u32::from_str(input.trim().trim_end_matches(";")).map_err(|_| ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::UnsignedInt,
    })
}

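/// Parses an `f32` from a span, tolerating a trailing `;`.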
fn from_float(input: Span) -> Result<f32, ParsePresetError> {
    // Presets like to commit ✨CRIMES✨ and end their lines with a ";".
    // It's too hard to put this in the lexer because we can't tell the difference
    // between semicolon crimes and a valid param/texture name listing.
    f32::from_str(input.trim().trim_end_matches(";")).map_err(|_| ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Float,
    })
}

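/// Parses a boolean from a span, accepting `0`/`1` as well as `true`/`false`.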
fn from_bool(input: Span) -> Result<bool, ParsePresetError> {
    if let Ok(i) = i32::from_str(input.trim()) {
        return match i {
            1 => Ok(true),
            0 => Ok(false),
            _ => Err(ParsePresetError::ParserError {
                offset: input.location_offset(),
                row: input.location_line(),
                col: input.get_column(),
                kind: ParseErrorKind::Bool,
            }),
        };
    }
    bool::from_str(input.trim()).map_err(|_| ParsePresetError::ParserError {
        offset: input.location_offset(),
        row: input.location_line(),
        col: input.get_column(),
        kind: ParseErrorKind::Bool,
    })
}

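/// Parses a key of the form `{key}{index}`: for example, with `key = "shader"`,
/// the input `shader3` yields `3`. The digits must run to the end of the input.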
fn parse_indexed_key<'a>(key: &'static str, input: Span<'a>) -> IResult<Span<'a>, i32> {
    let (input, _) = tag(key)(input)?;
    let (input, idx) = map_res(digit1, from_int)(input)?;
    let (input, _) = eof(input)?;
    Ok((input, idx))
}

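/// Limit on how many `#reference`s deep a preset may go before parsing fails
/// with `ParsePresetError::ExceededReferenceDepth`.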
pub const SHADER_MAX_REFERENCE_DEPTH: usize = 16;

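// Loads the contents of every `#reference`'d preset, following nested references
// depth first, and returns (directory, contents) pairs with child references ordered
// before the presets that referenced them, so later (shallower) presets take precedence.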
// prereq: root_path must be contextualized
fn load_child_reference_strings(
    root_references: Vec<PathBuf>,
    root_path: impl AsRef<Path>,
    context: &FastHashMap<String, String>,
) -> Result<Vec<(PathBuf, String)>, ParsePresetError> {
    let root_path = root_path.as_ref();

    let mut reference_depth = 0;
    let mut reference_strings: VecDeque<(PathBuf, String)> = VecDeque::new();
    let root_references = vec![(root_path.to_path_buf(), root_references)];
    let mut root_references = VecDeque::from(root_references);
    // search needs to be depth first to allow for overrides.
    while let Some((mut reference_root, referenced_paths)) = root_references.pop_front() {
        if reference_depth > SHADER_MAX_REFERENCE_DEPTH {
            return Err(ParsePresetError::ExceededReferenceDepth);
        }
        // enter the current root
        reference_depth += 1;
        // canonicalize current root
        apply_context(&mut reference_root, context);
        let reference_root = reference_root
            .canonicalize()
            .map_err(|e| ParsePresetError::IOError(reference_root.to_path_buf(), e))?;

        // resolve all referenced paths against root
        // println!("Resolving {referenced_paths:?} against {reference_root:?}.");

        for path in referenced_paths {
            let mut path = reference_root.join(path.clone());
            apply_context(&mut path, context);

            let mut path = path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;
            // println!("Opening {:?}", path);
            let mut reference_contents = String::new();
            File::open(&path)
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?
                .read_to_string(&mut reference_contents)
                .map_err(|e| ParsePresetError::IOError(path.clone(), e))?;

            let mut new_tokens = do_lex(&reference_contents)?;
            let new_references: Vec<PathBuf> = new_tokens
                .extract_if(|token| *token.key.fragment() == "#reference")
                .map(|value| PathBuf::from(*value.value.fragment()))
                .collect();

            path.pop();
            reference_strings.push_front((path.clone(), reference_contents));
            if !new_references.is_empty() {
                root_references.push_front((path, new_references));
            }
        }
    }

    Ok(reference_strings.into())
}

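/// Parses the preset at `path` into a flat list of [`Value`]s, applying the given
/// wildcard `context` to the path before it is read and lexed.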
pub(crate) fn parse_preset(
    path: impl AsRef<Path>,
    context: WildcardContext,
) -> Result<Vec<Value>, ParsePresetError> {
    let path = path.as_ref();
    let mut path = path.to_path_buf();
    let context = context.to_hashmap();

    apply_context(&mut path, &context);

    let path = path
        .canonicalize()
        .map_err(|e| ParsePresetError::IOError(path.to_path_buf(), e))?;

    let mut contents = String::new();
    File::open(&path)
        .and_then(|mut f| f.read_to_string(&mut contents))
        .map_err(|e| ParsePresetError::IOError(path.to_path_buf(), e))?;

    let tokens = super::token::do_lex(&contents)?;
    parse_values(tokens, path, context)
}

// prereq: root_path must be contextualized
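/// Parses lexed [`Token`]s into [`Value`]s, resolving `#reference`'d presets and
/// canonicalizing shader and texture paths against the preset they appear in.
/// `root_path` must be absolute.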
pub fn parse_values(
    mut tokens: Vec<Token>,
    root_path: impl AsRef<Path>,
    context: FastHashMap<String, String>,
) -> Result<Vec<Value>, ParsePresetError> {
    let mut root_path = root_path.as_ref().to_path_buf();
    if root_path.is_relative() {
        return Err(ParsePresetError::RootPathWasNotAbsolute);
    }
    if !root_path.is_dir() {
        // we don't really care if this doesn't do anything because a non-canonical root path will
        // fail at a later stage during resolution.
        root_path.pop();
    }

    let references: Vec<PathBuf> = tokens
        .extract_if(|token| *token.key.fragment() == "#reference")
        .map(|value| PathBuf::from(*value.value.fragment()))
        .collect();

    // unfortunately we need to lex twice because there's no way to know the references ahead of time.
    // the returned references should have context applied

    let child_strings = load_child_reference_strings(references, &root_path, &context)?;
    let mut all_tokens: Vec<(&Path, Vec<Token>)> = Vec::new();

    for (path, string) in child_strings.iter() {
        // lex the child tokens
        let mut tokens = do_lex(string.as_ref())?;
        tokens.retain(|token| *token.key.fragment() != "#reference");
        all_tokens.push((path.as_path(), tokens))
    }

    // load depth first, so all child tokens are first.
    // Later tokens take precedence.
    all_tokens.push((root_path.as_path(), tokens));

    // collect all possible parameter names.
    let mut parameter_names: Vec<&str> = Vec::new();
    for (_, tokens) in all_tokens.iter_mut() {
        for token in tokens.extract_if(|token| *token.key.fragment() == "parameters") {
            let parameter_name_string: &str = token.value.fragment();
            for parameter_name in parameter_name_string.split(';') {
                parameter_names.push(parameter_name);
            }
        }
    }

    // collect all possible texture names.
    let mut texture_names: Vec<&str> = Vec::new();
    for (_, tokens) in all_tokens.iter_mut() {
        for token in tokens.extract_if(|token| *token.key.fragment() == "textures") {
            let texture_name_string: &str = token.value.fragment();
            for texture_name in texture_name_string.split(';') {
                texture_names.push(texture_name);
            }
        }
    }

    let mut values = Vec::new();
    // resolve shader paths.
    for (path, tokens) in all_tokens.iter_mut() {
        for token in tokens.extract_if(|token| parse_indexed_key("shader", token.key).is_ok()) {
            let (_, index) = parse_indexed_key("shader", token.key).map_err(|e| match e {
                nom::Err::Error(e) | nom::Err::Failure(e) => {
                    let input: Span = e.input;
                    ParsePresetError::ParserError {
                        offset: input.location_offset(),
                        row: input.location_line(),
                        col: input.get_column(),
                        kind: ParseErrorKind::Index("shader"),
                    }
                }
                _ => ParsePresetError::ParserError {
                    offset: 0,
                    row: 0,
                    col: 0,
                    kind: ParseErrorKind::Index("shader"),
                },
            })?;

            let mut relative_path = path.to_path_buf();
            relative_path.push(*token.value.fragment());
            relative_path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
            values.push(Value::Shader(index, relative_path))
        }
    }

    // resolve texture paths
    let mut textures = Vec::new();
    for (path, tokens) in all_tokens.iter_mut() {
        for token in tokens.extract_if(|token| texture_names.contains(token.key.fragment())) {
            let mut relative_path = path.to_path_buf();
            relative_path.push(*token.value.fragment());
            relative_path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
            textures.push((token.key, relative_path))
        }
    }

    let mut tokens: Vec<(&Path, Token)> = all_tokens
        .into_iter()
        .flat_map(|(p, token)| token.into_iter().map(move |t| (p, t)))
        .collect();

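    // For each declared texture, pull its companion keys (`NAME_mipmap`, `NAME_linear`,
    // `NAME_wrap_mode`/`NAME_repeat_mode`, and the nonstandard `filter_NAME`) out of the
    // remaining tokens to build its sampling settings.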
    for (texture, path) in textures {
        let mipmap = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_mipmap")
                && t.key.len() == texture.len() + "_mipmap".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;

        let linear = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_linear")
                && t.key.len() == texture.len() + "_linear".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;

        let wrap_mode = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
                && (t.key.len() == texture.len() + "_wrap_mode".len()
                    || t.key.len() == texture.len() + "_repeat_mode".len())
        })
        // NOPANIC: infallible
        .map_or_else(WrapMode::default, |(_, v)| {
            WrapMode::from_str(&v.value).unwrap()
        });

        // This really isn't supported but crt-torridgristle uses this syntax.
        // Again, don't know how this works in RA but RA's parser isn't as strict as ours.
        let filter = remove_if(&mut tokens, |(_, t)| {
            t.key.starts_with("filter_")
                && t.key.ends_with(*texture)
                && t.key.len() == "filter_".len() + texture.len()
        })
        // NOPANIC: infallible
        .map_or(None, |(_, v)| Some(FilterMode::from_str(&v.value).unwrap()));

        values.push(Value::Texture {
            name: texture.to_string(),
            filter_mode: filter.unwrap_or(if linear {
                FilterMode::Linear
            } else {
                FilterMode::Nearest
            }),
            wrap_mode,
            mipmap,
            path,
        })
    }

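    // Dispatch every remaining token to its Value variant; anything unrecognized is
    // deferred to rest_tokens for the scale and undeclared-parameter passes below.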
    let mut rest_tokens = Vec::new();
    // hopefully no more textures left in the token tree
    for (p, token) in tokens {
        if parameter_names.contains(token.key.fragment()) {
            let param_val = from_float(token.value)
                // This is literally just to work around BEAM_PROFILE in crt-hyllian-sinc-glow.slangp
                // which has ""0'.000000". This somehow works in RA because it defaults to 0, probably.
                // This hack is only used for **known** parameter names. If we tried this for undeclared
                // params (god help me), it would be pretty bad because we lose texture path fallback.
                .unwrap_or(0.0);
            values.push(Value::Parameter(
                token.key.fragment().to_string(),
                param_val,
            ));
            continue;
        }
        if token.key.fragment() == &"shaders" {
            let shader_count = from_int(token.value)?;
            values.push(Value::ShaderCount(shader_count));
            continue;
        }
        if token.key.fragment() == &"feedback_pass" {
            let feedback_pass = from_int(token.value)?;
            values.push(Value::FeedbackPass(feedback_pass));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("filter_linear", token.key) {
            let linear = from_bool(token.value)?;
            values.push(Value::FilterMode(
                idx,
                if linear {
                    FilterMode::Linear
                } else {
                    FilterMode::Nearest
                },
            ));
            continue;
        }

        if let Ok((_, idx)) = parse_indexed_key("wrap_mode", token.key) {
            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
            values.push(Value::WrapMode(idx, wrap_mode));
            continue;
        }

        // crt-geom uses repeat_mode...
        if let Ok((_, idx)) = parse_indexed_key("repeat_mode", token.key) {
            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
            values.push(Value::WrapMode(idx, wrap_mode));
            continue;
        }

        // crt-royale uses 'texture_wrap_mode' instead of 'wrap_mode'; I have no idea
        // how this could possibly work in RA, but here it is...
        if let Ok((_, idx)) = parse_indexed_key("texture_wrap_mode", token.key) {
            let wrap_mode = WrapMode::from_str(&token.value).unwrap();
            values.push(Value::WrapMode(idx, wrap_mode));
            continue;
        }

        if let Ok((_, idx)) = parse_indexed_key("frame_count_mod", token.key) {
            let frame_count_mod = from_ul(token.value)?;
            values.push(Value::FrameCountMod(idx, frame_count_mod));
            continue;
        }

        if let Ok((_, idx)) = parse_indexed_key("srgb_framebuffer", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::SrgbFramebuffer(idx, enabled));
            continue;
        }

        if let Ok((_, idx)) = parse_indexed_key("float_framebuffer", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::FloatFramebuffer(idx, enabled));
            continue;
        }

        if let Ok((_, idx)) = parse_indexed_key("mipmap_input", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::MipmapInput(idx, enabled));
            continue;
        }

        // vector-glow-alt-render.slangp uses "mipmap" for pass 1, but "mipmap_input" for everything else.
        if let Ok((_, idx)) = parse_indexed_key("mipmap", token.key) {
            let enabled = from_bool(token.value)?;
            values.push(Value::MipmapInput(idx, enabled));
            continue;
        }

        if let Ok((_, idx)) = parse_indexed_key("alias", token.key) {
            values.push(Value::Alias(idx, token.value.to_string()));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_type", token.key) {
            let scale_type = ScaleType::from_str(token.value.trim())?;
            values.push(Value::ScaleType(idx, scale_type));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_type_x", token.key) {
            let scale_type = ScaleType::from_str(token.value.trim())?;
            values.push(Value::ScaleTypeX(idx, scale_type));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_type_y", token.key) {
            let scale_type = ScaleType::from_str(token.value.trim())?;
            values.push(Value::ScaleTypeY(idx, scale_type));
            continue;
        }
        rest_tokens.push((p, token))
    }

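    // Second pass over the leftovers: scale factors can only be interpreted once the
    // scale types above are known (absolute scales parse as integers, everything else
    // as floats), and any still-unrecognized tokens become undeclared parameters or
    // undeclared textures.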
    let mut undeclared_textures = Vec::new();
    for (path, token) in &rest_tokens {
        if let Ok((_, idx)) = parse_indexed_key("scale", token.key) {
            let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                let scale = from_int(token.value)?;
                ScaleFactor::Absolute(scale)
            } else {
                let scale = from_float(token.value)?;
                ScaleFactor::Float(scale)
            };

            values.push(Value::Scale(idx, scale));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_x", token.key) {
            let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) | Value::ScaleTypeX(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                let scale = from_int(token.value)?;
                ScaleFactor::Absolute(scale)
            } else {
                let scale = from_float(token.value)?;
                ScaleFactor::Float(scale)
            };

            values.push(Value::ScaleX(idx, scale));
            continue;
        }
        if let Ok((_, idx)) = parse_indexed_key("scale_y", token.key) {
            let scale = if values.iter().any(|t| matches!(*t, Value::ScaleType(match_idx, ScaleType::Absolute) | Value::ScaleTypeY(match_idx, ScaleType::Absolute) if match_idx == idx)) {
                let scale = from_int(token.value)?;
                ScaleFactor::Absolute(scale)
            } else {
                let scale = from_float(token.value)?;
                ScaleFactor::Float(scale)
            };

            values.push(Value::ScaleY(idx, scale));
            continue;
        }

        // handle undeclared parameters after parsing everything else as a last resort.
        if let Ok(param_val) = from_float(token.value) {
            values.push(Value::Parameter(
                token.key.fragment().to_string(),
                param_val,
            ));
        }
        // very last resort, assume undeclared texture (must have extension)
        else if Path::new(token.value.fragment()).extension().is_some()
            && ["_mipmap", "_linear", "_wrap_mode", "_repeat_mode"]
                .iter()
                .all(|k| !token.key.ends_with(k))
        {
            let mut relative_path = path.to_path_buf();
            relative_path.push(*token.value.fragment());
            relative_path
                .canonicalize()
                .map_err(|e| ParsePresetError::IOError(relative_path.clone(), e))?;
            undeclared_textures.push((token.key, relative_path));
        }

        // we tried our best
    }

    // Deal with potential mipmap, filtering, and wrap mode information for any undeclared textures.
    for (texture, path) in undeclared_textures {
        let mipmap = remove_if(&mut rest_tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_mipmap")
                && t.key.len() == texture.len() + "_mipmap".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;

        let linear = remove_if(&mut rest_tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && t.key.ends_with("_linear")
                && t.key.len() == texture.len() + "_linear".len()
        })
        .map_or_else(|| Ok(false), |(_, v)| from_bool(v.value))?;

        let wrap_mode = remove_if(&mut rest_tokens, |(_, t)| {
            t.key.starts_with(*texture)
                && (t.key.ends_with("_wrap_mode") || t.key.ends_with("_repeat_mode"))
                && (t.key.len() == texture.len() + "_wrap_mode".len()
                    || t.key.len() == texture.len() + "_repeat_mode".len())
        })
        // NOPANIC: infallible
        .map_or_else(WrapMode::default, |(_, v)| {
            WrapMode::from_str(&v.value).unwrap()
        });

        values.push(Value::Texture {
            name: texture.to_string(),
            filter_mode: if linear {
                FilterMode::Linear
            } else {
                FilterMode::Nearest
            },
            wrap_mode,
            mipmap,
            path,
        })
    }

    // all tokens should be ok to process now.
    Ok(values)
}

#[cfg(test)]
mod test {
    use crate::parse::value::parse_preset;
    use crate::WildcardContext;
    use std::path::PathBuf;

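    // Requires the slang-shaders test presets to be present at the relative path below.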
    #[test]
    pub fn parse_basic() {
        let root =
            PathBuf::from("../test/slang-shaders/bezel/Mega_Bezel/Presets/Base_CRT_Presets/MBZ__3__STD__MEGATRON-NTSC.slangp");
        let basic = parse_preset(root, WildcardContext::new());
        eprintln!("{basic:?}");
        assert!(basic.is_ok());
    }
}