mirror of https://github.com/italicsjenga/agb.git
synced 2024-12-24 00:31:34 +11:00
support loading multiple
This commit is contained in:
parent
b3db55330a
commit
6e5cee1e3f
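
The change teaches the image-converter proc macro to accept several aseprite files at once and to pool them into a single palette optimisation pass. A minimal sketch of the resulting call shape, with made-up file names (the `_inner` macro shown in this diff is presumably reached through a wrapper macro in agb itself):

    // Hypothetical invocation; the paths are illustrative.
    include_aseprite_inner!("gfx/player.aseprite", "gfx/enemies.aseprite");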
@@ -1,3 +1,11 @@
+use std::{
+    fs::File,
+    path::{Path, PathBuf},
+    process::Command,
+    str,
+};
+
+use image::DynamicImage;
 use serde::Deserialize;

 #[derive(Deserialize)]
@@ -32,8 +40,7 @@ pub enum Direction {
     Pingpong,
 }

-#[derive(Deserialize)]
+#[derive(Deserialize, Clone)]
 pub struct FrameTag {
     pub name: String,
     pub from: u32,
@@ -41,16 +48,54 @@ pub struct FrameTag {
     pub direction: Direction,
 }

-#[derive(Deserialize)]
+#[derive(Deserialize, Clone)]
 pub struct Frame {
     pub frame: Frame2,
     pub trimmed: bool,
 }

-#[derive(Deserialize)]
+#[derive(Deserialize, Clone)]
 pub struct Frame2 {
     pub x: u32,
     pub y: u32,
     pub w: u32,
     pub h: u32,
 }
+
+pub fn generate_from_file(filename: &str) -> (Aseprite, DynamicImage) {
+    let out_dir = std::env::var("OUT_DIR").expect("Expected OUT_DIR");
+
+    let output_filename = Path::new(&out_dir).join(&*filename);
+    let image_output = output_filename.with_extension("png");
+    let json_output = output_filename.with_extension("json");
+
+    let command = Command::new("aseprite")
+        .args([
+            &PathBuf::from("-b"),
+            &PathBuf::from(filename),
+            &"--sheet".into(),
+            &image_output,
+            &"--format".into(),
+            &"json-array".into(),
+            &"--data".into(),
+            &json_output,
+            &"--list-tags".into(),
+        ])
+        .output()
+        .expect("Could not run aseprite");
+    assert!(
+        command.status.success(),
+        "Aseprite did not complete successfully : {}",
+        str::from_utf8(&*command.stdout).unwrap_or("Output contains invalid string")
+    );
+
+    let json: Aseprite = serde_json::from_reader(
+        File::open(&json_output).expect("The json output from aseprite could not be openned"),
+    )
+    .expect("The output from aseprite could not be decoded");
+
+    (
+        json,
+        image::open(image_output).expect("Image should be readable"),
+    )
+}
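
A rough sketch of how the new generate_from_file helper could be exercised, assuming this module is in scope as `aseprite`, the `aseprite` binary is on PATH, and OUT_DIR is set (the file name is made up):

    use image::GenericImageView;

    fn demo() {
        let (data, sheet) = aseprite::generate_from_file("gfx/player.aseprite");

        // Every frame's rectangle indexes into the packed sprite sheet image.
        let (sheet_w, sheet_h) = sheet.dimensions();
        for frame in &data.frames {
            assert!(frame.frame.x + frame.frame.w <= sheet_w);
            assert!(frame.frame.y + frame.frame.h <= sheet_h);
        }
    }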
@@ -13,6 +13,10 @@ pub(crate) struct Image {
 impl Image {
     pub fn load_from_file(image_path: &path::Path) -> Self {
         let img = image::open(image_path).expect("Expected image to exist");
+        Self::load_from_dyn_image(img)
+    }
+
+    pub fn load_from_dyn_image(img: image::DynamicImage) -> Self {
         let (width, height) = img.dimensions();

         let width = width as usize;
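
This split only separates decoding from conversion: load_from_file still opens the file, while the new load_from_dyn_image lets a caller that already holds a decoded image::DynamicImage (such as the sheet returned by generate_from_file above) skip a second read from disk. A small sketch with made-up paths:

    fn demo() {
        // Existing path-based entry point.
        let _from_disk = Image::load_from_file(std::path::Path::new("out/player.png"));

        // New entry point: reuse an image that is already in memory.
        let (_json, sheet) = aseprite::generate_from_file("gfx/player.aseprite");
        let _from_memory = Image::load_from_dyn_image(sheet);
    }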
@@ -1,16 +1,12 @@
 use palette16::Palette16OptimisationResults;
 use proc_macro::TokenStream;
-use syn::parse_macro_input;
+use proc_macro2::Literal;
+use syn::parse::Parser;
+use syn::{parse_macro_input, punctuated::Punctuated, LitStr};

-use std::{
-    fs::File,
-    iter,
-    path::{Path, PathBuf},
-    process::Command,
-    str,
-};
+use std::{iter, path::Path, str};

-use quote::{format_ident, quote};
+use quote::{format_ident, quote, ToTokens};

 mod aseprite;
 mod colour;
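
The new syn imports (Parser, Punctuated, LitStr) are what let the macro take a comma-separated list of file names instead of a single literal. A standalone sketch of that parsing pattern, with an illustrative macro name:

    use proc_macro::TokenStream;
    use syn::{parse::Parser, punctuated::Punctuated, LitStr};

    // Accepts my_macro!("a.aseprite", "b.aseprite", ...): one or more string literals.
    #[proc_macro]
    pub fn my_macro(input: TokenStream) -> TokenStream {
        let parser = Punctuated::<LitStr, syn::Token![,]>::parse_separated_nonempty;
        let filenames: Vec<String> = match parser.parse(input) {
            Ok(list) => list.iter().map(|lit| lit.value()).collect(),
            Err(e) => return e.to_compile_error().into(),
        };

        let _ = filenames; // expansion would go here
        TokenStream::new()
    }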
@@ -78,60 +74,65 @@ pub fn include_gfx(input: TokenStream) -> TokenStream {
     TokenStream::from(module)
 }

+use quote::TokenStreamExt;
+struct ByteString<'a>(&'a [u8]);
+impl ToTokens for ByteString<'_> {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        tokens.append(Literal::byte_string(self.0));
+    }
+}
+
 #[proc_macro]
 pub fn include_aseprite_inner(input: TokenStream) -> TokenStream {
-    let input = parse_macro_input!(input as syn::LitStr);
-    let filename = input.value();
+    let parser = Punctuated::<LitStr, syn::Token![,]>::parse_separated_nonempty;
+    let parsed = match parser.parse(input) {
+        Ok(e) => e,
+        Err(e) => return e.to_compile_error().into(),
+    };
+
+    let mut optimiser = palette16::Palette16Optimiser::new();
+    let mut images = Vec::new();
+    let mut frames = Vec::new();
+    let mut tags = Vec::new();

     let root = std::env::var("CARGO_MANIFEST_DIR").expect("Failed to get cargo manifest dir");
-    let path = Path::new(&root).join(&*filename);
-
-    let out_dir = std::env::var("OUT_DIR").expect("Expected OUT_DIR");
-
-    let output_filename = Path::new(&out_dir).join(&*filename);
-    let image_output = output_filename.with_extension("png");
-    let json_output = output_filename.with_extension("json");
-
-    let command = Command::new("aseprite")
-        .args([
-            &PathBuf::from("-b"),
-            &path,
-            &"--sheet".into(),
-            &image_output,
-            &"--format".into(),
-            &"json-array".into(),
-            &"--data".into(),
-            &json_output,
-            &"--list-tags".into(),
-        ])
-        .output()
-        .expect("Could not run aseprite");
-    assert!(
-        command.status.success(),
-        "Aseprite did not complete successfully : {}",
-        str::from_utf8(&*command.stdout).unwrap_or("Output contains invalid string")
-    );
-
-    let json: aseprite::Aseprite = serde_json::from_reader(
-        File::open(&json_output).expect("The json output from aseprite could not be openned"),
-    )
-    .expect("The output from aseprite could not be decoded");
-
-    // check that the size of the sprites are valid
-    assert!(
-        json.frames[0].frame.w == json.frames[0].frame.h
-            && json.frames[0].frame.w.is_power_of_two()
-            && json.frames[0].frame.w <= 32
-    );
-
-    let image = Image::load_from_file(image_output.as_path());
-
-    let optimised_results =
-        optimiser_for_image(&image, json.frames[0].frame.w as usize).optimise_palettes(None);
-
-    let (palette_data, tile_data, assignments) =
-        palete_tile_data(&optimised_results, json.frames[0].frame.w as usize, &image);
+    let filenames: Vec<String> = parsed
+        .iter()
+        .map(|s| s.value())
+        .map(|s| {
+            Path::new(&root)
+                .join(&*s)
+                .as_path()
+                .to_string_lossy()
+                .into_owned()
+        })
+        .collect();
+
+    for filename in filenames.iter() {
+        let (json, image) = aseprite::generate_from_file(filename);
+        let tile_size = json.frames[0].frame.w;
+
+        for frame in json.frames.iter() {
+            assert!(frame.frame.w == tile_size);
+            assert!(
+                frame.frame.w == frame.frame.h
+                    && frame.frame.w.is_power_of_two()
+                    && frame.frame.w <= 32
+            );
+        }
+
+        let image = Image::load_from_dyn_image(image);

+        // check that the size of the sprites are valid
+        add_to_optimiser(&mut optimiser, &image, tile_size as usize);
+        images.push(image);
+        frames.push(json.frames.clone());
+        tags.push(json.meta.frame_tags.clone());
+    }
+
+    let optimised_results = optimiser.optimise_palettes(None);
+
+    let (palette_data, tile_data, assignments) = palete_tile_data(&optimised_results, &images);

     let palette_data = palette_data.iter().map(|colours| {
         quote! {
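
The ByteString wrapper introduced above is worth calling out: rather than splicing each tile byte into the generated code as its own literal (the old #(#data),* form, removed in the next hunk), it appends one byte-string literal token, which keeps the generated token stream small. A self-contained sketch of the same pattern (names are illustrative, not this crate's API):

    use proc_macro2::{Literal, TokenStream};
    use quote::{quote, ToTokens, TokenStreamExt};

    struct ByteString<'a>(&'a [u8]);

    impl ToTokens for ByteString<'_> {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Emits the whole slice as a single b"..." literal token.
            tokens.append(Literal::byte_string(self.0));
        }
    }

    fn emit(data: &[u8]) -> TokenStream {
        let bytes = ByteString(data);
        // A byte-string literal is a &'static [u8; N], which coerces to &[u8].
        quote! { const TILE_DATA: &[u8] = #bytes; }
    }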
@@ -142,31 +143,34 @@ pub fn include_aseprite_inner(input: TokenStream) -> TokenStream {
     });

     let mut pre = 0;
-    let sprites = json
-        .frames
+    let sprites = frames
         .iter()
+        .flatten()
         .zip(assignments.iter())
         .map(|(f, assignment)| {
             let start: usize = pre;
             let end: usize = pre + (f.frame.w as usize / 8) * (f.frame.h as usize / 8) * 32;
-            let data = &tile_data[start..end];
+            let data = ByteString(&tile_data[start..end]);
             pre = end;
             let width = f.frame.w as usize;
             let height = f.frame.h as usize;
             quote! {
                 Sprite::new(
                     &PALETTES[#assignment],
-                    &[
-                        #(#data),*
-                    ],
+                    #data,
                     Size::from_width_height(#width, #height)
                 )
             }
         });

-    let tags = json.meta.frame_tags.iter().map(|tag| {
-        let start = tag.from as usize;
-        let end = tag.to as usize;
+    let tags = tags
+        .iter()
+        .enumerate()
+        .map(|(i, tag)| {
+            tag.iter().map(move |tag| (i, tag)).map(|(i, tag)| {
+                let offset: usize = frames[0..i].iter().map(|s| s.len()).sum();
+                let start = tag.from as usize + offset;
+                let end = tag.to as usize + offset;
                 let direction = tag.direction as usize;

                 let name = &tag.name;
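
Since all files' frames are concatenated into one SPRITES array, a tag's from/to indices have to be shifted by the number of frames contributed by earlier files; that is what the offset sum above does. A tiny worked example with made-up frame counts:

    fn demo() {
        // File 0 exports 4 frames, file 1 exports 6.
        let frames_per_file = [4usize, 6];

        // A tag in file 1 covering its frames 2..=3 ...
        let (i, from, to) = (1usize, 2usize, 3usize);
        let offset: usize = frames_per_file[0..i].iter().sum();

        // ... points at combined indices 6..=7.
        assert_eq!((from + offset, to + offset), (6, 7));
    }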
@@ -175,12 +179,18 @@ pub fn include_aseprite_inner(input: TokenStream) -> TokenStream {
                 quote! {
                     #name => Tag::new(SPRITES, #start, #end, #direction)
                 }
-    });
+            })
+        })
+        .flatten();
+
+    let include_paths = filenames.iter().map(|s| {
+        quote! {
+            const _: &[u8] = include_bytes!(#s);
+        }
+    });

-    let include_path = path.to_string_lossy();
-
     let module = quote! {
-        const _: &[u8] = include_bytes!(#include_path);
+        #(#include_paths)*


         const PALETTES: &[Palette16] = &[
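
The generated const _: &[u8] = include_bytes!(...) items are never read; emitting one per input file makes rustc treat every aseprite file as a compile-time dependency, so editing any of them triggers recompilation of the crate that invoked the macro. For two inputs the expansion would start roughly like this (paths illustrative):

    // Inside the generated module:
    const _: &[u8] = include_bytes!("/home/user/game/gfx/player.aseprite");
    const _: &[u8] = include_bytes!("/home/user/game/gfx/enemies.aseprite");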
@@ -230,11 +240,19 @@ fn convert_image(
 }

 fn optimiser_for_image(image: &Image, tile_size: usize) -> palette16::Palette16Optimiser {
+    let mut palette_optimiser = palette16::Palette16Optimiser::new();
+    add_to_optimiser(&mut palette_optimiser, image, tile_size);
+    palette_optimiser
+}
+
+fn add_to_optimiser(
+    palette_optimiser: &mut palette16::Palette16Optimiser,
+    image: &Image,
+    tile_size: usize,
+) {
     let tiles_x = image.width / tile_size;
     let tiles_y = image.height / tile_size;

-    let mut palette_optimiser = palette16::Palette16Optimiser::new();
-
     for y in 0..tiles_y {
         for x in 0..tiles_x {
             let mut palette = palette16::Palette16::new();
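
Splitting optimiser_for_image into a thin wrapper over add_to_optimiser is what allows several images to feed one optimiser. A sketch of the intended call pattern (mirroring the loop in include_aseprite_inner, using names from this diff):

    fn demo(images: &[Image], tile_size: usize) -> palette16::Palette16OptimisationResults {
        let mut optimiser = palette16::Palette16Optimiser::new();
        for image in images {
            // Each image contributes its per-tile palettes to the shared optimiser.
            add_to_optimiser(&mut optimiser, image, tile_size);
        }
        optimiser.optimise_palettes(None)
    }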
@@ -250,14 +268,11 @@ fn optimiser_for_image(image: &Image, tile_size: usize) -> palette16::Palette16Optimiser {
             palette_optimiser.add_palette(palette);
         }
     }
-
-    palette_optimiser
 }

 fn palete_tile_data(
     optimiser: &Palette16OptimisationResults,
-    tile_size: usize,
-    image: &Image,
+    images: &[Image],
 ) -> (Vec<Vec<u16>>, Vec<u8>, Vec<usize>) {
     let palette_data: Vec<Vec<u16>> = optimiser
         .optimised_palettes
@@ -274,11 +289,13 @@ fn palete_tile_data(
         })
         .collect();

+    let mut tile_data = Vec::new();
+
+    for image in images {
+        let tile_size = image.height;
         let tiles_x = image.width / tile_size;
         let tiles_y = image.height / tile_size;

-    let mut tile_data = vec![];
-
         for y in 0..tiles_y {
             for x in 0..tiles_x {
                 let palette_index = optimiser.assignments[y * tiles_x + x];
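
Note that inside the per-image loop the tile size is recovered as image.height, i.e. each exported sheet is assumed to be a single horizontal strip of square frames, so tiles_y is always 1. A quick illustration with made-up dimensions:

    fn demo() {
        // e.g. eight 16x16 frames packed side by side by aseprite.
        let (width, height) = (128usize, 16usize);
        let tile_size = height;

        assert_eq!(width / tile_size, 8); // tiles_x
        assert_eq!(height / tile_size, 1); // tiles_y: one row per sheet
    }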
@@ -296,6 +313,7 @@ fn palete_tile_data(
                 }
             }
         }
+    }

     let tile_data = tile_data
         .chunks(2)
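
The final chunks(2) pass (truncated in this hunk) presumably packs two 4-bit palette indices into each byte, which is the GBA's 4bpp tile format with the left pixel in the low nibble. A small sketch of that packing, independent of this crate:

    fn demo() {
        let indices = [1u8, 2, 3, 4]; // palette indices, each 0..16
        let packed: Vec<u8> = indices
            .chunks(2)
            .map(|pair| pair[0] | (pair[1] << 4))
            .collect();

        assert_eq!(packed, vec![0x21, 0x43]);
    }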