mirror of https://github.com/italicsjenga/agb.git
synced 2024-12-24 00:31:34 +11:00

support loading multiple

parent b3db55330a
commit 6e5cee1e3f
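
The diff below is against the image-converter proc-macro crate: the macro input becomes a comma-separated list of aseprite file names instead of a single one, every file is exported through the aseprite CLI, and all resulting frames share one palette optimiser and one combined SPRITES list. As a rough, self-contained sketch of the new input handling (the file names here are invented; only the Punctuated parsing pattern comes from the diff):

use syn::{parse::Parser, punctuated::Punctuated, LitStr};

fn main() {
    // Same parsing approach as include_aseprite_inner below: a non-empty,
    // comma-separated list of string literals.
    let parser = Punctuated::<LitStr, syn::Token![,]>::parse_separated_nonempty;
    let parsed = parser
        .parse_str(r#""gfx/player.aseprite", "gfx/enemies.aseprite""#)
        .expect("should parse a comma-separated list of string literals");
    let filenames: Vec<String> = parsed.iter().map(|s| s.value()).collect();
    assert_eq!(filenames.len(), 2);
}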
@@ -1,3 +1,11 @@
+use std::{
+    fs::File,
+    path::{Path, PathBuf},
+    process::Command,
+    str,
+};
+
+use image::DynamicImage;
 use serde::Deserialize;
 
 #[derive(Deserialize)]
@@ -32,8 +40,7 @@ pub enum Direction {
     Pingpong,
 }
 
-#[derive(Deserialize)]
-
+#[derive(Deserialize, Clone)]
 pub struct FrameTag {
     pub name: String,
     pub from: u32,
@@ -41,16 +48,54 @@ pub struct FrameTag {
     pub direction: Direction,
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, Clone)]
 pub struct Frame {
     pub frame: Frame2,
     pub trimmed: bool,
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, Clone)]
 pub struct Frame2 {
     pub x: u32,
     pub y: u32,
     pub w: u32,
     pub h: u32,
 }
+
+pub fn generate_from_file(filename: &str) -> (Aseprite, DynamicImage) {
+    let out_dir = std::env::var("OUT_DIR").expect("Expected OUT_DIR");
+
+    let output_filename = Path::new(&out_dir).join(&*filename);
+    let image_output = output_filename.with_extension("png");
+    let json_output = output_filename.with_extension("json");
+
+    let command = Command::new("aseprite")
+        .args([
+            &PathBuf::from("-b"),
+            &PathBuf::from(filename),
+            &"--sheet".into(),
+            &image_output,
+            &"--format".into(),
+            &"json-array".into(),
+            &"--data".into(),
+            &json_output,
+            &"--list-tags".into(),
+        ])
+        .output()
+        .expect("Could not run aseprite");
+    assert!(
+        command.status.success(),
+        "Aseprite did not complete successfully : {}",
+        str::from_utf8(&*command.stdout).unwrap_or("Output contains invalid string")
+    );
+
+    let json: Aseprite = serde_json::from_reader(
+        File::open(&json_output).expect("The json output from aseprite could not be openned"),
+    )
+    .expect("The output from aseprite could not be decoded");
+
+    (
+        json,
+        image::open(image_output).expect("Image should be readable"),
+    )
+}
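
The hunks above are the aseprite data module; the hunks that follow touch the Image loader and the proc-macro entry points. As a small sketch of how one entry of the exported "frames" array maps onto Frame and Frame2 (the struct shapes are copied from the diff, while the surrounding JSON produced by aseprite's --format json-array is assumed):

use serde::Deserialize;

#[derive(Deserialize, Clone)]
struct Frame2 {
    x: u32,
    y: u32,
    w: u32,
    h: u32,
}

#[derive(Deserialize, Clone)]
struct Frame {
    frame: Frame2,
    trimmed: bool,
}

fn main() {
    // One element of the "frames" array in the exported JSON.
    let entry = r#"{ "frame": { "x": 0, "y": 0, "w": 16, "h": 16 }, "trimmed": false }"#;
    let f: Frame = serde_json::from_str(entry).expect("frame entry should deserialize");
    assert_eq!((f.frame.x, f.frame.y, f.frame.w, f.frame.h), (0, 0, 16, 16));
    assert!(!f.trimmed);
}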
@@ -13,6 +13,10 @@ pub(crate) struct Image {
 impl Image {
     pub fn load_from_file(image_path: &path::Path) -> Self {
         let img = image::open(image_path).expect("Expected image to exist");
+        Self::load_from_dyn_image(img)
+    }
+
+    pub fn load_from_dyn_image(img: image::DynamicImage) -> Self {
         let (width, height) = img.dimensions();
 
         let width = width as usize;
@@ -1,16 +1,12 @@
 use palette16::Palette16OptimisationResults;
 use proc_macro::TokenStream;
-use syn::parse_macro_input;
+use proc_macro2::Literal;
+use syn::parse::Parser;
+use syn::{parse_macro_input, punctuated::Punctuated, LitStr};
 
-use std::{
-    fs::File,
-    iter,
-    path::{Path, PathBuf},
-    process::Command,
-    str,
-};
+use std::{iter, path::Path, str};
 
-use quote::{format_ident, quote};
+use quote::{format_ident, quote, ToTokens};
 
 mod aseprite;
 mod colour;
@@ -78,60 +74,65 @@ pub fn include_gfx(input: TokenStream) -> TokenStream {
     TokenStream::from(module)
 }
 
+use quote::TokenStreamExt;
+struct ByteString<'a>(&'a [u8]);
+impl ToTokens for ByteString<'_> {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        tokens.append(Literal::byte_string(self.0));
+    }
+}
+
 #[proc_macro]
 pub fn include_aseprite_inner(input: TokenStream) -> TokenStream {
-    let input = parse_macro_input!(input as syn::LitStr);
-    let filename = input.value();
+    let parser = Punctuated::<LitStr, syn::Token![,]>::parse_separated_nonempty;
+    let parsed = match parser.parse(input) {
+        Ok(e) => e,
+        Err(e) => return e.to_compile_error().into(),
+    };
+
+    let mut optimiser = palette16::Palette16Optimiser::new();
+    let mut images = Vec::new();
+    let mut frames = Vec::new();
+    let mut tags = Vec::new();
 
     let root = std::env::var("CARGO_MANIFEST_DIR").expect("Failed to get cargo manifest dir");
-    let path = Path::new(&root).join(&*filename);
-
-    let out_dir = std::env::var("OUT_DIR").expect("Expected OUT_DIR");
-
-    let output_filename = Path::new(&out_dir).join(&*filename);
-    let image_output = output_filename.with_extension("png");
-    let json_output = output_filename.with_extension("json");
-
-    let command = Command::new("aseprite")
-        .args([
-            &PathBuf::from("-b"),
-            &path,
-            &"--sheet".into(),
-            &image_output,
-            &"--format".into(),
-            &"json-array".into(),
-            &"--data".into(),
-            &json_output,
-            &"--list-tags".into(),
-        ])
-        .output()
-        .expect("Could not run aseprite");
-    assert!(
-        command.status.success(),
-        "Aseprite did not complete successfully : {}",
-        str::from_utf8(&*command.stdout).unwrap_or("Output contains invalid string")
-    );
-
-    let json: aseprite::Aseprite = serde_json::from_reader(
-        File::open(&json_output).expect("The json output from aseprite could not be openned"),
-    )
-    .expect("The output from aseprite could not be decoded");
-
-    // check that the size of the sprites are valid
-    assert!(
-        json.frames[0].frame.w == json.frames[0].frame.h
-            && json.frames[0].frame.w.is_power_of_two()
-            && json.frames[0].frame.w <= 32
-    );
-
-    let image = Image::load_from_file(image_output.as_path());
-
-    let optimised_results =
-        optimiser_for_image(&image, json.frames[0].frame.w as usize).optimise_palettes(None);
-
-    let (palette_data, tile_data, assignments) =
-        palete_tile_data(&optimised_results, json.frames[0].frame.w as usize, &image);
+
+    let filenames: Vec<String> = parsed
+        .iter()
+        .map(|s| s.value())
+        .map(|s| {
+            Path::new(&root)
+                .join(&*s)
+                .as_path()
+                .to_string_lossy()
+                .into_owned()
+        })
+        .collect();
+
+    for filename in filenames.iter() {
+        let (json, image) = aseprite::generate_from_file(filename);
+        let tile_size = json.frames[0].frame.w;
+
+        for frame in json.frames.iter() {
+            assert!(frame.frame.w == tile_size);
+            assert!(
+                frame.frame.w == frame.frame.h
+                    && frame.frame.w.is_power_of_two()
+                    && frame.frame.w <= 32
+            );
+        }
+
+        let image = Image::load_from_dyn_image(image);
+
+        add_to_optimiser(&mut optimiser, &image, tile_size as usize);
+        images.push(image);
+        frames.push(json.frames.clone());
+        tags.push(json.meta.frame_tags.clone());
+    }
+
+    let optimised_results = optimiser.optimise_palettes(None);
+
+    let (palette_data, tile_data, assignments) = palete_tile_data(&optimised_results, &images);
 
     let palette_data = palette_data.iter().map(|colours| {
         quote! {
@@ -142,31 +143,34 @@ pub fn include_aseprite_inner(input: TokenStream) -> TokenStream {
     });
 
     let mut pre = 0;
-    let sprites = json
-        .frames
+    let sprites = frames
         .iter()
+        .flatten()
         .zip(assignments.iter())
         .map(|(f, assignment)| {
             let start: usize = pre;
             let end: usize = pre + (f.frame.w as usize / 8) * (f.frame.h as usize / 8) * 32;
-            let data = &tile_data[start..end];
+            let data = ByteString(&tile_data[start..end]);
             pre = end;
             let width = f.frame.w as usize;
             let height = f.frame.h as usize;
             quote! {
                 Sprite::new(
                     &PALETTES[#assignment],
-                    &[
-                        #(#data),*
-                    ],
+                    #data,
                     Size::from_width_height(#width, #height)
                 )
             }
         });
 
-    let tags = json.meta.frame_tags.iter().map(|tag| {
-        let start = tag.from as usize;
-        let end = tag.to as usize;
+    let tags = tags
+        .iter()
+        .enumerate()
+        .map(|(i, tag)| {
+            tag.iter().map(move |tag| (i, tag)).map(|(i, tag)| {
+                let offset: usize = frames[0..i].iter().map(|s| s.len()).sum();
+                let start = tag.from as usize + offset;
+                let end = tag.to as usize + offset;
         let direction = tag.direction as usize;
 
         let name = &tag.name;
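
The two hunks above are where the generated sprite data changes shape: instead of interpolating each tile byte into a &[...] array, the bytes are wrapped in the new ByteString helper, which emits a single byte-string literal token. A minimal, self-contained sketch of that pattern (the helper is copied from the diff; the main function and DATA name are only for illustration):

use proc_macro2::Literal;
use quote::{quote, ToTokens, TokenStreamExt};

struct ByteString<'a>(&'a [u8]);

impl ToTokens for ByteString<'_> {
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        // Append one b"..." literal rather than one token per byte.
        tokens.append(Literal::byte_string(self.0));
    }
}

fn main() {
    let data = ByteString(&[0, 1, 2, 3]);
    // The generated tokens contain a single byte-string literal for the four bytes.
    println!("{}", quote! { pub static DATA: &[u8] = #data; });
}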
@@ -175,12 +179,18 @@ pub fn include_aseprite_inner(input: TokenStream) -> TokenStream {
         quote! {
             #name => Tag::new(SPRITES, #start, #end, #direction)
         }
+            })
+        })
+        .flatten();
+
+    let include_paths = filenames.iter().map(|s| {
+        quote! {
+            const _: &[u8] = include_bytes!(#s);
+        }
     });
 
-    let include_path = path.to_string_lossy();
-
     let module = quote! {
-        const _: &[u8] = include_bytes!(#include_path);
+        #(#include_paths)*
 
 
         const PALETTES: &[Palette16] = &[
@@ -230,11 +240,19 @@ fn convert_image(
 }
 
 fn optimiser_for_image(image: &Image, tile_size: usize) -> palette16::Palette16Optimiser {
+    let mut palette_optimiser = palette16::Palette16Optimiser::new();
+    add_to_optimiser(&mut palette_optimiser, image, tile_size);
+    palette_optimiser
+}
+
+fn add_to_optimiser(
+    palette_optimiser: &mut palette16::Palette16Optimiser,
+    image: &Image,
+    tile_size: usize,
+) {
     let tiles_x = image.width / tile_size;
     let tiles_y = image.height / tile_size;
 
-    let mut palette_optimiser = palette16::Palette16Optimiser::new();
-
     for y in 0..tiles_y {
         for x in 0..tiles_x {
             let mut palette = palette16::Palette16::new();
@@ -250,14 +268,11 @@ fn optimiser_for_image(image: &Image, tile_size: usize) -> palette16::Palette16Optimiser {
             palette_optimiser.add_palette(palette);
         }
     }
-
-    palette_optimiser
 }
 
 fn palete_tile_data(
     optimiser: &Palette16OptimisationResults,
-    tile_size: usize,
-    image: &Image,
+    images: &[Image],
 ) -> (Vec<Vec<u16>>, Vec<u8>, Vec<usize>) {
     let palette_data: Vec<Vec<u16>> = optimiser
         .optimised_palettes
@@ -274,11 +289,13 @@ fn palete_tile_data(
         })
         .collect();
 
+    let mut tile_data = Vec::new();
+
+    for image in images {
+        let tile_size = image.height;
     let tiles_x = image.width / tile_size;
     let tiles_y = image.height / tile_size;
 
-    let mut tile_data = vec![];
-
     for y in 0..tiles_y {
         for x in 0..tiles_x {
             let palette_index = optimiser.assignments[y * tiles_x + x];
@@ -296,6 +313,7 @@ fn palete_tile_data(
             }
         }
     }
+    }
 
     let tile_data = tile_data
         .chunks(2)
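
With several files feeding a single SPRITES list, each file's tags have to be shifted by the number of frames contributed by the files before it, which is what the offset sum in the tag loop above computes. A small illustration of that arithmetic (the frame counts are made up; only the offset formula comes from the diff):

fn main() {
    // One Vec per input file, as in the `frames` variable above; only the
    // lengths matter here, so unit values stand in for real frames.
    let frames: Vec<Vec<()>> = vec![vec![(); 4], vec![(); 6], vec![(); 2]];

    // Offset for tags coming from the third file (index 2): the first two
    // files contribute 4 + 6 = 10 frames to the flattened SPRITES list.
    let i = 2;
    let offset: usize = frames[0..i].iter().map(|s| s.len()).sum();
    assert_eq!(offset, 10);

    // A tag covering frames 1..=3 of that file therefore becomes 11..=13
    // in the combined list.
    let (from, to) = (1usize, 3usize);
    assert_eq!((from + offset, to + offset), (11, 13));
}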