use clap::Parser;
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use thiserror::Error;

use crunch_cli::utils::{load_image, make_paths, normalize_path, ColourPalette, PaletteMap};

use crate::cli_args::Args;
use crate::commands::{Palette, Remap};

#[derive(Error, Debug)]
pub enum PipelineError {
    #[error("Use a file ending with '.toml' or '.json' to configure your pipeline")]
    FormatDetection,
}

/// A single entry in a pipeline file: an inline list of actions, a reference
/// to a shared action list, or a glob pattern paired with a reference.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum PipelineType {
    Pipeline {
        input_path: String,
        output_path: String,
        actions: Vec<Args>,
    },
    Ref {
        input_path: String,
        output_path: String,
        reference: String,
    },
    GlobRef {
        pattern: String,
        output_dir: String,
        reference: String,
    },
}

/// A named, reusable list of actions that pipeline entries can reference.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PipelineRef {
    pub actions: Vec<Args>,
}

/// The top-level contents of a pipeline definition file.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PipelineFile {
    pub refs: HashMap<String, PipelineRef>,
    pub pipelines: Vec<PipelineType>,
}

/// Execute a predefined pipeline
#[derive(Debug, Clone, Parser, Serialize, Deserialize)]
#[clap(author, version = "0.3.0")]
pub struct Pipeline {
    /// The path to the pipeline definition file
    #[serde(default)]
    pub config: String,
}

/// Unwrap a `Result`, logging the error and returning from the enclosing
/// closure if it is an `Err`.
macro_rules! result {
    ($value: expr) => {
        match $value {
            Ok(val) => val,
            Err(e) => {
                log::error!("{}", e);
                return;
            }
        }
    };
}

impl Pipeline {
    pub fn execute(&self) -> anyhow::Result<()> {
        let path = std::env::current_dir().map(|path| path.join(&self.config))?;
        let path_string = format!("{}", &path.display());
        log::debug!("Trying to read from input file: {}", &path.display());

        if !path_string.ends_with(".toml") && !path_string.ends_with(".json") {
            Err(PipelineError::FormatDetection)?;
        }

        let file_contents = std::fs::read(&path)?;
        log::debug!("Found correct file type and read bytes, trying to parse");

        let pipeline_data: PipelineFile = if path_string.ends_with(".toml") {
            toml::from_str(String::from_utf8(file_contents)?.as_str())?
        } else {
            serde_json::from_slice(&file_contents)?
        };

        log::debug!("Expanding pipeline file into targets");
        let base_path = PathBuf::from(path.parent().unwrap());

        get_targets(base_path, &pipeline_data).for_each(|(input_path, output_path, actions)| {
            // Make sure the target directory structure exists before doing any work.
            match make_paths(&output_path) {
                Ok(_) => {}
                Err(e) => {
                    log::error!("Failed to create target directory {}; {}", &output_path, e);
                    return;
                }
            }

            // With no actions to run, the target is just a copy of the source.
            if actions.is_empty() {
                match std::fs::copy(&input_path, &output_path) {
                    Ok(_) => {}
                    Err(e) => {
                        log::error!("Failed to copy {} to {}; {}", input_path, output_path, e);
                    }
                };
                return;
            }

            let mut file = result!(load_image(&input_path, None));
            log::debug!(
                "Loaded {}, Executing {} actions",
                &input_path,
                actions.len()
            );

            for step in actions {
                match step {
                    Args::Rotate(rotate) => {
                        file = result!(rotate.run(&file));
                    }
                    Args::Extrude(extrude) => {
                        file = result!(extrude.run(&file));
                    }
                    Args::Scale(scale) => {
                        file = result!(scale.run(&file));
                    }
                    Args::Flip(flip) => {
                        file = result!(flip.run(&file));
                    }
                    Args::Remap(remap) => {
                        let palette = result!(load_image(&remap.palette, None));
                        let image_palette = ColourPalette::from(&file);
                        let target_palette = ColourPalette::from(&palette);
                        let mappings =
                            PaletteMap::calculate_mapping(&image_palette, &target_palette);
                        file = result!(Remap::remap_image(file, mappings));
                    }
                    _ => {}
                }
            }

            // Ensure the containing directory exists before saving.
            let mut outer_target_path = PathBuf::from(&output_path);
            outer_target_path.pop();
            if let Err(e) = std::fs::create_dir(&outer_target_path) {
                match e.kind() {
                    std::io::ErrorKind::AlreadyExists => { /* This is fine */ }
                    _ => log::error!(
                        "Failed to create containing directory {}; {}",
                        outer_target_path.to_string_lossy(),
                        e
                    ),
                }
            }

            match file.save(&output_path) {
                Ok(_) => {}
                Err(e) => {
                    log::error!("Failed to save to {}; {}", output_path, e);
                }
            }
        });

        Ok(())
    }
}

/// Join `rest` onto `root`, normalise the result, and render it as a `String`.
fn join<T: AsRef<Path>>(root: &Path, rest: &T) -> String {
    let path = normalize_path(root.join(rest));
    format!("{}", path.display())
}

/// Expand the pipeline file into concrete `(input_path, output_path, actions)`
/// targets, resolving named references and glob patterns relative to `base_path`.
fn get_targets(
    base_path: PathBuf,
    pipeline_data: &PipelineFile,
) -> impl ParallelIterator<Item = (String, String, Vec<Args>)> + '_ {
    pipeline_data
        .pipelines
        .par_iter()
        .flat_map(move |pipe| match pipe {
            PipelineType::Pipeline {
                input_path,
                output_path,
                actions,
            } => vec![(
                join(&base_path, &input_path),
                join(&base_path, &output_path),
                actions.clone(),
            )],
            PipelineType::Ref {
                input_path,
                output_path,
                reference,
            } => pipeline_data
                .refs
                .get(reference.as_str())
                .iter()
                .map(|value| {
                    (
                        join(&base_path, &input_path),
                        join(&base_path, &output_path),
                        value.actions.clone(),
                    )
                })
                .collect(),
            PipelineType::GlobRef {
                pattern,
                output_dir,
                reference,
            } => pipeline_data
                .refs
                .get(reference.as_str())
                .iter()
                .map(|value| value.actions.clone())
                .flat_map(|actions| {
                    let mut paths = Vec::new();
                    let target_path = join(&base_path, pattern);
                    log::debug!("Mapping glob paths for '{}'", &target_path);
                    for entry in glob::glob(target_path.as_str()).unwrap() {
                        log::debug!("Found a glob match: [{:?}]", entry);
                        paths.push((actions.clone(), entry));
                    }
                    paths
                })
                .filter_map(|(actions, inner)| inner.ok().map(|p| (actions, p)))
                .filter_map(|(actions, path)| {
                    if let Some(filename) = path.file_name().and_then(|osstr| osstr.to_str()) {
                        let output_path = Path::new(output_dir.as_str());
                        let output_path = output_path.join(filename);
                        Some((
                            join(&base_path, &path),
                            join(&base_path, &output_path),
                            actions,
                        ))
                    } else {
                        None
                    }
                })
                .collect(),
        })
}
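
// Illustrative sketch of a unit test for the untagged `PipelineType` enum: an
// entry that carries a `reference` but no `actions` list cannot match the
// `Pipeline` variant, so it should fall through and deserialize as `Ref`.
// Field values below are placeholder examples, not taken from a real config.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn entry_with_reference_deserializes_as_ref() {
        let json = r#"{"input_path": "in.png", "output_path": "out.png", "reference": "shared"}"#;
        let parsed: PipelineType =
            serde_json::from_str(json).expect("entry should match the Ref variant");
        match parsed {
            PipelineType::Ref { reference, .. } => assert_eq!(reference, "shared"),
            other => panic!("expected PipelineType::Ref, got {:?}", other),
        }
    }
}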