clippy fixes

Alex Janka 2024-07-20 17:35:28 +10:00
parent fedc96730b
commit e9f2789703
19 changed files with 94 additions and 120 deletions

View file

@@ -120,5 +120,5 @@ pub fn main() -> ExitCode {
}
}
return ExitCode::SUCCESS;
ExitCode::SUCCESS
}
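The hunk above swaps a trailing return statement for a bare tail expression, the pattern clippy's needless_return lint flags. A minimal sketch with hypothetical function names, not the actual librashader code:

// Flagged by clippy::needless_return: an explicit `return` on the final statement.
fn exit_code_verbose(ok: bool) -> i32 {
    if !ok {
        return 1;
    }
    return 0; // warning: unneeded `return` statement
}

// Idiomatic form: the last expression is the function's value.
fn exit_code(ok: bool) -> i32 {
    if !ok {
        return 1;
    }
    0
}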

View file

@@ -46,7 +46,7 @@ pub(crate) mod internal {
pub(crate) fn get_cache() -> Result<Persy, Box<dyn Error>> {
let cache_dir = get_cache_dir()?;
match Persy::open_or_create_with(
&cache_dir.join("librashader.db.1"),
cache_dir.join("librashader.db.1"),
Config::new(),
|persy| {
let tx = persy.begin()?;
@@ -110,13 +110,13 @@ where
T: Cacheable,
{
if bypass_cache {
return Ok(load(factory(keys)?)?);
return load(factory(keys)?);
}
let cache = internal::get_cache();
let Ok(cache) = cache else {
return Ok(load(factory(keys)?)?);
return load(factory(keys)?);
};
let hashkey = {
@@ -124,8 +124,8 @@ where
for subkeys in keys {
hasher.update(subkeys.hash_bytes());
}
let hash = hasher.finalize();
hash
hasher.finalize()
};
'attempt: {
@@ -145,7 +145,7 @@ where
if let Some(slice) = T::to_bytes(&blob) {
let _ = internal::set_blob(&cache, index, hashkey.as_bytes(), &slice);
}
Ok(load(blob)?)
load(blob)
}
/// Cache a pipeline state object.
@@ -166,13 +166,13 @@ where
T: Cacheable,
{
if bypass_cache {
return Ok(restore_pipeline(None)?);
return restore_pipeline(None);
}
let cache = internal::get_cache();
let Ok(cache) = cache else {
return Ok(restore_pipeline(None)?);
return restore_pipeline(None);
};
let hashkey = {
@@ -180,18 +180,15 @@ where
for subkeys in keys {
hasher.update(subkeys.hash_bytes());
}
let hash = hasher.finalize();
hash
hasher.finalize()
};
let pipeline = 'attempt: {
if let Ok(Some(blob)) = internal::get_blob(&cache, index, hashkey.as_bytes()) {
let cached = restore_pipeline(Some(blob));
match cached {
Ok(res) => {
break 'attempt res;
}
_ => (),
if let Ok(res) = cached {
break 'attempt res;
}
}
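Two lints recur in the hunks above: wrapping an already-fallible call as Ok(expr?), which clippy reports as needless_question_mark, and binding a value only to return it on the next line, which it reports as let_and_return. A standalone sketch with hypothetical names, using std's DefaultHasher in place of blake3:

use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
use std::num::ParseIntError;

// Before: Ok(x?) unwraps a Result only to rewrap it with the same error type.
fn parse_verbose(s: &str) -> Result<u32, ParseIntError> {
    Ok(s.parse::<u32>()?)
}
// After: return the inner Result directly.
fn parse(s: &str) -> Result<u32, ParseIntError> {
    s.parse::<u32>()
}

// Before: a binding that is immediately returned (clippy::let_and_return).
fn digest_verbose(bytes: &[u8]) -> u64 {
    let mut hasher = DefaultHasher::new();
    hasher.write(bytes);
    let hash = hasher.finish();
    hash
}
// After: the finish() call is the block's tail expression.
fn digest(bytes: &[u8]) -> u64 {
    let mut hasher = DefaultHasher::new();
    hasher.write(bytes);
    hasher.finish()
}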

View file

@@ -36,8 +36,8 @@ where
let mut hasher = blake3::Hasher::new();
hasher.update(source.vertex.as_bytes());
hasher.update(source.fragment.as_bytes());
let hash = hasher.finalize();
hash
hasher.finalize()
};
let compilation = 'cached: {

View file

@@ -7,13 +7,13 @@ pub trait CacheKey {
impl CacheKey for u32 {
fn hash_bytes(&self) -> &[u8] {
&bytemuck::bytes_of(&*self)
bytemuck::bytes_of(self)
}
}
impl CacheKey for i32 {
fn hash_bytes(&self) -> &[u8] {
&bytemuck::bytes_of(&*self)
bytemuck::bytes_of(self)
}
}
@@ -25,13 +25,13 @@ impl CacheKey for &[u8] {
impl CacheKey for Vec<u8> {
fn hash_bytes(&self) -> &[u8] {
&self
self
}
}
impl CacheKey for Vec<u32> {
fn hash_bytes(&self) -> &[u8] {
bytemuck::cast_slice(&self)
bytemuck::cast_slice(self)
}
}
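The CacheKey changes drop redundant borrows such as &bytemuck::bytes_of(&*self) and &self, which clippy reports as needless_borrow (plus borrow_deref_ref for the &*self). A small sketch of the same shape with a hypothetical type and no bytemuck dependency:

struct BlobKey(Vec<u8>);

impl BlobKey {
    // Before: the extra & produces a &&Vec<u8> that only compiles because deref
    // coercion immediately peels the reference off again (clippy::needless_borrow).
    fn bytes_verbose(&self) -> &[u8] {
        &&self.0
    }

    // After: a single borrow of the Vec coerces to &[u8] directly.
    fn bytes(&self) -> &[u8] {
        &self.0
    }
}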

View file

@@ -275,6 +275,12 @@ impl Display for ContextItem {
#[derive(Debug, Clone)]
pub struct WildcardContext(VecDeque<ContextItem>);
impl Default for WildcardContext {
fn default() -> Self {
Self::new()
}
}
impl WildcardContext {
/// Create a new wildcard context.
pub fn new() -> Self {
@@ -391,12 +397,12 @@ pub(crate) fn apply_context(path: &mut PathBuf, context: &FastHashMap<String, St
if let Some(replacement) = context.get(key) {
return OsString::from(replacement.to_string()).into_encoded_bytes();
}
return caps[0].to_vec();
caps[0].to_vec()
});
// SAFETY: The original source is valid encoded bytes, and our replacement is
// valid encoded bytes. This upholds the safety requirements of `from_encoded_bytes_unchecked`.
new_path.push(unsafe { OsStr::from_encoded_bytes_unchecked(&replaced.as_ref()) })
new_path.push(unsafe { OsStr::from_encoded_bytes_unchecked(replaced.as_ref()) })
}
_ => new_path.push(component),
}
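The Default impl added above is the standard answer to clippy's new_without_default lint: a public new() that takes no arguments should be mirrored by Default so generic callers can construct the type. A standalone sketch with a hypothetical type:

use std::collections::VecDeque;

pub struct Context(VecDeque<String>);

// Without this impl, clippy::new_without_default fires on Context::new.
impl Default for Context {
    fn default() -> Self {
        Self::new()
    }
}

impl Context {
    pub fn new() -> Self {
        Self(VecDeque::new())
    }
}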

View file

@@ -23,6 +23,7 @@ use crate::extract_if::MakeExtractIf;
#[derive(Debug)]
pub enum Value {
ShaderCount(i32),
#[allow(dead_code)]
FeedbackPass(i32),
Shader(i32, PathBuf),
ScaleX(i32, ScaleFactor),
@@ -376,8 +377,7 @@ pub fn parse_values(
&& t.key.ends_with(*texture)
&& t.key.len() == "filter_".len() + texture.len()
})
// NOPANIC: infallible
.map_or(None, |(_, v)| Some(FilterMode::from_str(&v.value).unwrap()));
.map(|(_, v)| FilterMode::from_str(&v.value).unwrap());
values.push(Value::Texture {
name: texture.to_string(),
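The #[allow(dead_code)] above only silences an unused-variant warning, while the map_or(None, ..) to map(..) rewrite matches clippy's option_map_or_none lint: the closure already wraps its result in Some, so map expresses the same thing directly. A minimal sketch:

// Before: clippy::option_map_or_none, since map_or(None, |v| Some(f(v))) is just map(f).
fn doubled_verbose(x: Option<i32>) -> Option<i32> {
    x.map_or(None, |v| Some(v * 2))
}

// After
fn doubled(x: Option<i32>) -> Option<i32> {
    x.map(|v| v * 2)
}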

View file

@@ -16,7 +16,7 @@ fn parses_all_slang_presets() {
#[test]
fn parses_problematic() {
let path = "../test/Mega_Bezel_Packs/Duimon-Mega-Bezel/Presets/Advanced/Nintendo_NDS_DREZ/NDS-[DREZ]-[Native]-[ADV]-[Guest]-[Night].slangp";
ShaderPreset::try_parse(path).expect(&format!("Failed to parse {}", path));
ShaderPreset::try_parse(path).unwrap_or_else(|_| panic!("Failed to parse {}", path));
}
#[test]
@@ -30,5 +30,5 @@ fn parses_wildcard() {
context.append_item(ContextItem::CoreName(String::from("image display")));
ShaderPreset::try_parse_with_context(path, context)
.expect(&format!("Failed to parse {}", path));
.unwrap_or_else(|_| panic!("Failed to parse {}", path));
}
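The test changes swap expect(&format!(..)) for unwrap_or_else(|_| panic!(..)), clippy's expect_fun_call lint: format! runs eagerly even when the Result is Ok, while the closure defers the formatting to the failure path. A sketch with a hypothetical helper:

// Before: the panic message is built even on success (clippy::expect_fun_call).
fn read_config_verbose(path: &str) -> String {
    std::fs::read_to_string(path).expect(&format!("Failed to read {}", path))
}

// After: the message is only formatted if the call actually fails.
fn read_config(path: &str) -> String {
    std::fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read {}", path))
}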

View file

@@ -47,7 +47,7 @@ impl HlslBufferAssignments {
{
return true;
}
return false;
false
}
// Check if the mangled name matches.
@@ -87,7 +87,7 @@ impl HlslBufferAssignments {
}
}
return false;
false
}
}

View file

@@ -18,7 +18,7 @@ impl<'a> LinkInputs<'a> {
return None;
};
let Some(&Operand::IdRef(target)) = op.operands.get(0) else {
let Some(&Operand::IdRef(target)) = op.operands.first() else {
return None;
};
@@ -29,7 +29,7 @@ impl<'a> LinkInputs<'a> {
let Some(&Operand::LiteralBit32(binding)) = op.operands.get(2) else {
return None;
};
return Some(binding);
Some(binding)
})
}
@@ -62,7 +62,7 @@ impl<'a> LinkInputs<'a> {
if let Some(frag_ref) = bindings.get(&location) {
// if something is bound to the same location in the vertex shader,
// we're good.
inputs.remove(&frag_ref);
inputs.remove(frag_ref);
}
}
}
@@ -112,7 +112,7 @@ impl<'a> LinkInputs<'a> {
}
}
}
return true;
true
});
self.frag_builder.module_mut().annotations.retain(|instr| {
@@ -123,7 +123,7 @@ impl<'a> LinkInputs<'a> {
}
}
}
return true;
true
});
for entry_point in self.frag_builder.module_mut().entry_points.iter_mut() {
@@ -133,7 +133,7 @@ impl<'a> LinkInputs<'a> {
return false;
}
}
return true;
true
})
}
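This file (and several below) replaces operands.get(0) with operands.first(), clippy's get_first lint, and drops a needless borrow in inputs.remove(frag_ref). A minimal sketch of the get_first change:

// Before: clippy::get_first, since get(0) obscures the intent of "first element".
fn first_operand_verbose(operands: &[u32]) -> Option<u32> {
    operands.get(0).copied()
}

// After
fn first_operand(operands: &[u32]) -> Option<u32> {
    operands.first().copied()
}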

View file

@@ -33,12 +33,12 @@ struct OpAccessChain<'a> {
impl<'a> LowerCombinedImageSamplerPass<'a> {
pub fn new(builder: &'a mut Builder) -> Self {
let val = Self {
Self {
builder,
seen_functions: FxHashSet::default(),
};
val
}
}
pub(crate) fn do_pass(&mut self) {
@@ -109,7 +109,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
return None;
}
let Some(&Operand::IdRef(target)) = &i.operands.get(0) else {
let Some(&Operand::IdRef(target)) = &i.operands.first() else {
return None;
};
@@ -121,7 +121,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
return None;
};
return Some(format!("_{string}_sampler"));
Some(format!("_{string}_sampler"))
})
}
@@ -130,7 +130,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
return None;
}
let Some(&Operand::IdRef(referand)) = inst.operands.get(0) else {
let Some(&Operand::IdRef(referand)) = inst.operands.first() else {
return None;
};
@@ -183,7 +183,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
self.builder.decorate(
sampler_uniform,
decoration_type,
decoration.operands[2..].iter().map(|f| f.clone()),
decoration.operands[2..].iter().cloned(),
)
}
@@ -264,7 +264,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
}
if uniform_type.class.opcode == spirv::Op::TypeArray {
let Some(&Operand::IdRef(array_base_type)) = uniform_type.operands.get(0)
let Some(&Operand::IdRef(array_base_type)) = uniform_type.operands.first()
else {
continue;
};
@@ -402,7 +402,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
}
// This doesn't affect array loads because array loads load the result of the OpAccessChain which can be done in a separate pass.
let Some(Operand::IdRef(op_variable_id)) = &instr.operands.get(0) else {
let Some(Operand::IdRef(op_variable_id)) = &instr.operands.first() else {
instructions.push(instr);
continue;
};
@@ -474,7 +474,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
}
// This doesn't affect array loads because array loads load the result of the OpAccessChain which can be done in a separate pass.
let Some(Operand::IdRef(op_variable)) = &instr.operands.get(0) else {
let Some(Operand::IdRef(op_variable)) = &instr.operands.first() else {
instructions.push(instr);
continue;
};
@@ -565,7 +565,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
continue;
}
let Some(Operand::IdRef(op_variable)) = &instr.operands.get(0) else {
let Some(Operand::IdRef(op_variable)) = &instr.operands.first() else {
instructions.push(instr);
continue;
};
@@ -656,7 +656,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
continue;
}
let Some(&Operand::IdRef(function_id)) = instr.operands.get(0) else {
let Some(&Operand::IdRef(function_id)) = instr.operands.first() else {
continue;
};
@@ -729,7 +729,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
continue;
}
let Some(&Operand::IdRef(function_id)) = instr.operands.get(0) else {
let Some(&Operand::IdRef(function_id)) = instr.operands.first() else {
instructions.push(instr);
continue;
};
@@ -742,7 +742,7 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
return false;
};
op_access_chains.contains_key(&op_ref_id)
op_access_chains.contains_key(op_ref_id)
})
.collect::<Vec<_>>();
@@ -861,9 +861,9 @@ impl<'a> LowerCombinedImageSamplerPass<'a> {
seen_functions
}
fn rewrite_functions_definitions<'b>(
fn rewrite_functions_definitions(
&mut self,
mappings: &FxHashMap<spirv::Word, FxHashMap<spirv::Word, Cow<'b, CombinedImageSampler>>>,
mappings: &FxHashMap<spirv::Word, FxHashMap<spirv::Word, Cow<'_, CombinedImageSampler>>>,
) -> FxHashMap<spirv::Word, CombinedImageSampler> {
let mut sampled_refs = FxHashMap::default();
let mut functions = self.builder.module_ref().functions.clone();
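Alongside more get(0)/first() and let-binding cleanups, the hunks above replace .map(|f| f.clone()) with .cloned() (clippy::map_clone) and elide a named lifetime the compiler can infer. A sketch of the iterator change:

// Before: clippy::map_clone, cloning inside a map closure.
fn copy_names_verbose(names: &[String]) -> Vec<String> {
    names.iter().map(|n| n.clone()).collect()
}

// After: Iterator::cloned does the same thing.
fn copy_names(names: &[String]) -> Vec<String> {
    names.iter().cloned().collect()
}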

View file

@@ -5,6 +5,6 @@ pub mod lower_samplers;
pub(crate) fn load_module(words: &[u32]) -> rspirv::dr::Module {
let mut loader = rspirv::dr::Loader::new();
rspirv::binary::parse_words(words, &mut loader).unwrap();
let module = loader.module();
module
loader.module()
}

View file

@@ -205,7 +205,7 @@ impl ValidateTypeSemantics<&TypeInner> for UniqueSemantics {
}
};
return None;
None
}
}
@@ -426,13 +426,13 @@ impl NagaReflect {
// Verify types
if self.vertex.global_variables.iter().any(|(_, gv)| {
let ty = &self.vertex.types[gv.ty];
match ty.inner {
!matches!(
ty.inner,
TypeInner::Scalar { .. }
| TypeInner::Vector { .. }
| TypeInner::Matrix { .. }
| TypeInner::Struct { .. } => false,
_ => true,
}
| TypeInner::Vector { .. }
| TypeInner::Matrix { .. }
| TypeInner::Struct { .. }
)
}) {
return Err(ShaderReflectError::VertexSemanticError(
SemanticsErrorKind::InvalidResourceType,
@@ -483,7 +483,7 @@ impl NagaReflect {
SemanticsErrorKind::InvalidInputCount(vert_inputs),
));
}
for input in &vertex_entry_point.function.arguments {
if let Some(input) = &vertex_entry_point.function.arguments.first() {
let &Some(Binding::Location { location, .. }) = &input.binding else {
return Err(ShaderReflectError::VertexSemanticError(
SemanticsErrorKind::MissingBinding,
@@ -644,7 +644,7 @@ impl NagaReflect {
offset_type: UniformMemberBlock,
blame: SemanticErrorBlame,
) -> Result<(), ShaderReflectError> {
let reachable = Self::collect_uniform_names(&module, resource, blame)?;
let reachable = Self::collect_uniform_names(module, resource, blame)?;
let resource = &module.global_variables[resource];
@@ -824,7 +824,7 @@ impl NagaReflect {
Ok(TextureData {
// id: texture.id,
// descriptor_set,
name: &name,
name,
binding: binding.binding,
})
}
@@ -1002,44 +1002,3 @@ impl ReflectShader for NagaReflect {
})
}
}
#[cfg(test)]
mod test {
use crate::reflect::semantics::{Semantic, TextureSemantics, UniformSemantic};
use librashader_common::map::FastHashMap;
use librashader_preprocess::ShaderSource;
use librashader_presets::ShaderPreset;
// #[test]
// pub fn test_into() {
// let result = ShaderSource::load("../test/slang-shaders/crt/shaders/crt-royale/src/crt-royale-scanlines-horizontal-apply-mask.slang").unwrap();
// let compilation = crate::front::GlslangCompilation::try_from(&result).unwrap();
//
// let mut loader = rspirv::dr::Loader::new();
// rspirv::binary::parse_words(compilation.vertex.as_binary(), &mut loader).unwrap();
// let module = loader.module();
//
// let outputs: Vec<&Instruction> = module
// .types_global_values
// .iter()
// .filter(|i| i.class.opcode == Op::Variable)
// .collect();
//
// println!("{outputs:#?}");
// }
// #[test]
// pub fn mega_bezel_reflect() {
// let preset = ShaderPreset::try_parse(
// "../test/shaders_slang/bezel/Mega_Bezel/Presets/MBZ__0__SMOOTH-ADV.slangp",
// )
// .unwrap();
//
// let mut uniform_semantics: FastHashMap<String, UniformSemantic> = Default::default();
// let mut texture_semantics: FastHashMap<String, Semantic<TextureSemantics>> = Default::default();
//
//
//
//
// }
}
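The global-variable type check above is rewritten with the matches! macro (negated here as !matches!), which is what clippy's match_like_matches_macro suggests for a match whose arms only yield true or false. A standalone sketch:

// Before: clippy::match_like_matches_macro, a match used purely as a boolean test.
fn is_digit_verbose(c: char) -> bool {
    match c {
        '0'..='9' => true,
        _ => false,
    }
}

// After
fn is_digit(c: char) -> bool {
    matches!(c, '0'..='9')
}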

View file

@@ -53,13 +53,13 @@ impl CompileShader<MSL> for NagaReflect {
) -> Result<(String, TranslationInfo), ShaderCompileError> {
let mut valid =
naga::valid::Validator::new(ValidationFlags::all(), Capabilities::empty());
let info = valid.validate(&module)?;
let info = valid.validate(module)?;
let pipeline_options = PipelineOptions {
allow_and_force_point_size: false,
};
let msl = naga::back::msl::write_string(&module, &info, &options, &pipeline_options)?;
let msl = naga::back::msl::write_string(module, &info, &options, &pipeline_options)?;
Ok(msl)
}

View file

@@ -22,12 +22,14 @@ impl CompileShader<SPIRV> for NagaReflect {
) -> Result<Vec<u32>, ShaderCompileError> {
let mut valid =
naga::valid::Validator::new(ValidationFlags::all(), Capabilities::empty());
let info = valid.validate(&module)?;
let mut options = naga::back::spv::Options::default();
options.lang_version = version;
let info = valid.validate(module)?;
let options = naga::back::spv::Options {
lang_version: version,
..Default::default()
};
let spv = naga::back::spv::write_vec(
&module,
module,
&info,
&options,
Some(&PipelineOptions {
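The options construction above moves from "build with Default, then overwrite a field" to struct-update syntax, clippy's field_reassign_with_default lint. A sketch using a simplified, hypothetical options struct rather than naga's actual type:

#[derive(Default)]
struct SpvOptions {
    lang_version: (u8, u8),
    strip_debug_info: bool,
}

// Before: clippy::field_reassign_with_default
fn make_options_verbose(version: (u8, u8)) -> SpvOptions {
    let mut options = SpvOptions::default();
    options.lang_version = version;
    options
}

// After: set the field at construction and take the rest from Default.
fn make_options(version: (u8, u8)) -> SpvOptions {
    SpvOptions {
        lang_version: version,
        ..Default::default()
    }
}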

View file

@@ -16,7 +16,7 @@ impl CompileShader<WGSL> for NagaReflect {
options: Self::Options,
) -> Result<ShaderCompilerOutput<String, Self::Context>, ShaderCompileError> {
fn write_wgsl(module: &Module, info: &ModuleInfo) -> Result<String, ShaderCompileError> {
let wgsl = naga::back::wgsl::write_string(&module, &info, WriterFlags::empty())?;
let wgsl = naga::back::wgsl::write_string(module, info, WriterFlags::empty())?;
Ok(wgsl)
}

View file

@@ -95,8 +95,8 @@ where
type UniformOffset: ContextOffset<H, C, Self::DeviceContext>;
/// Bind a texture to the input descriptor set
fn bind_texture<'a>(
descriptors: &mut Self::DescriptorSet<'a>,
fn bind_texture(
descriptors: &mut Self::DescriptorSet<'_>,
samplers: &Self::SamplerSet,
binding: &TextureBinding,
texture: &Self::InputTexture,
@@ -105,11 +105,11 @@ where
#[allow(clippy::too_many_arguments)]
/// Write uniform and texture semantics to the provided storages.
fn bind_semantics<'a>(
fn bind_semantics(
device: &Self::DeviceContext,
sampler_set: &Self::SamplerSet,
uniform_storage: &mut UniformStorage<H, C, U, P, Self::DeviceContext>,
descriptor_set: &mut Self::DescriptorSet<'a>,
descriptor_set: &mut Self::DescriptorSet<'_>,
uniform_inputs: UniformInputs<'_>,
original: &Self::InputTexture,
source: &Self::InputTexture,

View file

@@ -14,7 +14,7 @@ impl<'a, F, I, E> FramebufferInit<'a, F, I, E> {
/// Create a new framebuffer initializer with the given
/// closures to create owned framebuffers and image views.
pub fn new(
filters: impl Iterator<Item = &'a BindingMeta> + ExactSizeIterator,
filters: impl ExactSizeIterator<Item = &'a BindingMeta>,
owned_generator: &'a dyn Fn() -> Result<F, E>,
input_generator: &'a dyn Fn() -> I,
) -> Self {
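The rewritten bound above works because ExactSizeIterator is a subtrait of Iterator, so a single impl ExactSizeIterator<Item = ..> bound already carries the Iterator requirement. A small sketch:

// ExactSizeIterator: Iterator, so the single bound is sufficient.
fn count_verbose(items: impl Iterator<Item = u32> + ExactSizeIterator) -> usize {
    items.len()
}

fn count(items: impl ExactSizeIterator<Item = u32>) -> usize {
    items.len()
}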

View file

@@ -7,8 +7,8 @@ pub trait FilterChainParameters {
fn set_enabled_pass_count(&mut self, count: usize);
/// Enumerates the active parameters as well as their values in the current filter chain.
fn enumerate_parameters<'a>(
&'a self,
fn enumerate_parameters(
&self,
) -> ::librashader_common::map::halfbrown::Iter<String, f32>;
/// Get the value of the given parameter if present.
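Both this trait and the bind_texture/bind_semantics signatures further up drop named lifetimes that the compiler can infer, the pattern targeted by clippy's needless_lifetimes lint. A standalone sketch:

use std::collections::HashMap;

struct Parameters(HashMap<String, f32>);

impl Parameters {
    // Before: clippy::needless_lifetimes, 'a adds nothing the compiler cannot infer.
    fn iter_verbose<'a>(&'a self) -> std::collections::hash_map::Iter<'a, String, f32> {
        self.0.iter()
    }

    // After: elide the lifetime, or write '_ where one is syntactically required.
    fn iter(&self) -> std::collections::hash_map::Iter<'_, String, f32> {
        self.0.iter()
    }
}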

View file

@@ -39,6 +39,16 @@ pub struct InlineRingBuffer<T, const SIZE: usize> {
index: usize,
}
impl<T, const SIZE: usize> Default for InlineRingBuffer<T, SIZE>
where
T: Copy,
T: Default,
{
fn default() -> Self {
Self::new()
}
}
impl<T, const SIZE: usize> InlineRingBuffer<T, SIZE>
where
T: Copy,