#[cfg(feature = "ldtk_1_0_0")]
mod data_1_0_0;
#[cfg(any(feature = "ldtk_1_1_1", feature = "ldtk_1_1_0"))]
mod data_1_1_0;
#[cfg(any(feature = "ldtk_1_1_3", feature = "ldtk_1_1_2"))]
mod data_1_1_2;
#[cfg(any(feature = "ldtk_1_2_1", feature = "ldtk_1_2_0"))]
mod data_1_2_1;
#[cfg(any(feature = "ldtk_1_2_3", feature = "ldtk_1_2_2"))]
mod data_1_2_2;
#[cfg(feature = "ldtk_1_2_4")]
mod data_1_2_4;
#[cfg(feature = "ldtk_1_2_5")]
mod data_1_2_5;
#[cfg(any(feature = "ldtk_1_4_1", feature = "ldtk_1_4_0"))]
#[cfg(feature = "ldtk_1_0_0")]
pub use data_1_0_0::*;
#[cfg(any(feature = "ldtk_1_1_1", feature = "ldtk_1_1_0"))]
pub use data_1_1_0::*;
#[cfg(any(feature = "ldtk_1_1_3", feature = "ldtk_1_1_2"))]
pub use data_1_1_2::*;
#[cfg(any(feature = "ldtk_1_2_1", feature = "ldtk_1_2_0"))]
pub use data_1_2_1::*;
#[cfg(any(feature = "ldtk_1_2_3", feature = "ldtk_1_2_2"))]
pub use data_1_2_2::*;
#[cfg(feature = "ldtk_1_2_4")]
pub use data_1_2_4::*;
#[cfg(feature = "ldtk_1_2_5")]
pub use data_1_2_5::*;
#[cfg(any(feature = "ldtk_1_4_1", feature = "ldtk_1_4_0"))]
#[cfg(any(feature = "ldtk_1_5_3"))]
pub use data_1_5_3::*;
use serde::Deserialize;

#[derive(Debug, thiserror::Error)]
pub enum ParseError {
    #[error("Failed to parse file: {0}")]
    SerdeError(String),
}
pub trait LdtkFromBytes<'a>: Deserialize<'a> {
    fn from_bytes(bytes: &'a [u8]) -> Result<Self, ParseError> {
        serde_json::from_slice(bytes).map_err(|e| ParseError::SerdeError(format!("{}", e)))
    }
}
macro_rules! impl_from_bytes {
    ($type: tt) => {
        impl<'a> From<&'a [u8]> for $type {
            fn from(value: &'a [u8]) -> Self {
                #[cfg(feature = "no_panic")]
                {
                    match $type::from_bytes(value) {
                        Ok(val) => val,
                        Err(e) => {
                            log::error!("{}", e);
                            std::process::abort();
                        }
                    }
                }
                #[cfg(not(feature = "no_panic"))]
                {
                    $type::from_bytes(value).expect("Failed to parse ldtk file")
                }
            }
        }
    };
}
impl<'a> LdtkFromBytes<'a> for Level {}
impl<'a> LdtkFromBytes<'a> for Project {}
impl_from_bytes!(Level);
impl_from_bytes!(Project);
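// Usage sketch (not part of the public API): the two parsing entry points
// provided above. `from_bytes` surfaces the parse error, while the
// macro-generated `From<&[u8]>` impl panics (or aborts under the `no_panic`
// feature) on malformed input.
#[allow(dead_code)]
fn example_parse(project_bytes: &[u8], level_bytes: &[u8]) -> Result<(Project, Level), ParseError> {
    let project = Project::from_bytes(project_bytes)?;
    let level = Level::from(level_bytes);
    Ok((project, level))
}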
#[cfg(feature = "bevy")]
mod _bevy_impl {
    use super::*;
    use bevy::asset::io::Reader;
    use bevy::asset::{
        AssetLoader, AsyncReadExt, LoadContext, UntypedAssetId, VisitAssetDependencies,
    };
    use bevy::prelude::{Asset, Handle};
    use bevy::reflect::TypePath;

    impl TypePath for Project {
        fn type_path() -> &'static str {
            "micro_ldtk::ldtk::Project"
        }

        fn short_type_path() -> &'static str {
            "Project"
        }
    }

    impl VisitAssetDependencies for Project {
        fn visit_dependencies(&self, _visit: &mut impl FnMut(UntypedAssetId)) {}
    }

    impl TypePath for Level {
        fn type_path() -> &'static str {
            "micro_ldtk::ldtk::Level"
        }

        fn short_type_path() -> &'static str {
            "Level"
        }
    }

    impl VisitAssetDependencies for Level {
        fn visit_dependencies(&self, _visit: &mut impl FnMut(UntypedAssetId)) {}
    }
    impl Asset for Project {}
    impl Asset for Level {}

    #[derive(Asset, TypePath)]
    pub struct LevelSet(pub Vec<Handle<Level>>);

    #[derive(Default)]
    pub struct LdtkLoader;

    impl AssetLoader for LdtkLoader {
        type Asset = Project;
        type Settings = ();
        type Error = LdtkLoadError;

        async fn load<'a>(
            &'a self,
            reader: &'a mut Reader<'_>,
            _settings: &'a Self::Settings,
            load_context: &'a mut LoadContext<'_>,
        ) -> Result<Self::Asset, Self::Error> {
            let mut bytes = Vec::new();
            reader.read_to_end(&mut bytes).await?;
            let project = Project::from_bytes(bytes.as_slice())?;

            // Collect the identifier and relative path of every level that is
            // stored in an external `.ldtkl` file.
            let levels = project
                .levels
                .iter()
                .flat_map(|level| {
                    log::debug!(
                        "Checking if level is external: {} [{}]",
                        level.identifier,
                        level.external_rel_path.is_some()
                    );
                    level
                        .external_rel_path
                        .as_ref()
                        .map(|path| (level.identifier.clone(), path))
                })
                .collect::<Vec<(String, &String)>>();

            // External level paths are relative to the project file, so join
            // them onto the project's parent directory when one exists.
            let parent_path = load_context.path().parent().map(|pp| pp.to_path_buf());
            let mut level_set = Vec::with_capacity(levels.len());
            for (_, path) in levels {
                level_set.push(match &parent_path {
                    Some(parent) => load_context.load::<Level>(parent.join(path)),
                    None => load_context.load::<Level>(path),
                });
            }

            load_context.add_labeled_asset(
                format!("{}ExternalLevels", project.iid),
                LevelSet(level_set),
            );
            Ok(project)
        }

        fn extensions(&self) -> &[&str] {
            &["ldtk"]
        }
    }
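    // Usage sketch (not part of the loader API): the external levels gathered
    // above are registered as a labeled sub-asset named
    // "<project iid>ExternalLevels", so they can be requested alongside the
    // project. The project path and the iid argument here are hypothetical.
    #[allow(dead_code)]
    fn example_external_level_set(
        asset_server: &bevy::asset::AssetServer,
        project_iid: &str,
    ) -> Handle<LevelSet> {
        asset_server.load(format!("maps/world.ldtk#{project_iid}ExternalLevels"))
    }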
    #[derive(Default)]
    pub struct LdtkLevelLoader;

    impl AssetLoader for LdtkLevelLoader {
        type Asset = Level;
        type Settings = ();
        type Error = LdtkLoadError;

        async fn load<'a>(
            &'a self,
            reader: &'a mut Reader<'_>,
            _settings: &'a Self::Settings,
            _load_context: &'a mut LoadContext<'_>,
        ) -> Result<Self::Asset, Self::Error> {
            let mut bytes = Vec::new();
            reader.read_to_end(&mut bytes).await?;
            let level = Level::from_bytes(bytes.as_slice())?;
            Ok(level)
        }

        fn extensions(&self) -> &[&str] {
            &["ldtkl"]
        }
    }
}
#[cfg(feature = "bevy")]
pub use _bevy_impl::{LdtkLoader, LdtkLevelLoader, LevelSet};
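// Sketch (assumed wiring, not a plugin provided by this module): register the
// asset types and loaders on a Bevy `App` so `.ldtk` and `.ldtkl` files load
// through `LdtkLoader` / `LdtkLevelLoader`.
#[cfg(feature = "bevy")]
#[allow(dead_code)]
fn example_register_loaders(app: &mut bevy::app::App) {
    use bevy::asset::AssetApp;
    app.init_asset::<Project>()
        .init_asset::<Level>()
        .init_asset::<LevelSet>()
        .register_asset_loader(LdtkLoader)
        .register_asset_loader(LdtkLevelLoader);
}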
impl Project {
    /// Collects every level in the project, whether it is stored under a world
    /// (multi-world layout) or in the flat top-level `levels` array.
    pub fn get_all_levels(&self) -> Vec<&Level> {
        if !self.worlds.is_empty() {
            self.worlds
                .iter()
                .flat_map(|world| world.levels.iter())
                .collect()
        } else {
            self.levels.iter().collect()
        }
    }
    #[cfg(any(
        feature = "ldtk_1_2_5",
        feature = "ldtk_1_2_4",
        feature = "ldtk_1_2_3",
        feature = "ldtk_1_2_2",
        feature = "ldtk_1_2_1",
        feature = "ldtk_1_2_0",
        feature = "ldtk_1_1_3",
        feature = "ldtk_1_1_2",
        feature = "ldtk_1_1_1",
        feature = "ldtk_1_1_0",
        feature = "ldtk_1_0_0"
    ))]
    pub fn get_world_levels(&self, _identifier: impl ToString) -> Vec<&Level> {
        // Schema versions before 1.3 have no multi-world support, so there are
        // never any world-scoped levels to return.
        vec![]
    }

    #[cfg(any(
        feature = "ldtk_1_3_0",
        feature = "ldtk_1_4_0",
        feature = "ldtk_1_4_1",
        feature = "ldtk_1_5_3"
    ))]
    pub fn get_world_levels(&self, identifier: impl ToString) -> Vec<&Level> {
        let id = identifier.to_string();
        self.worlds
            .iter()
            .find(|world| world.identifier == id)
            .map(|list| list.levels.iter().collect())
            .unwrap_or_else(Vec::new)
    }
}
#[derive(Debug, thiserror::Error)]
pub enum LdtkLoadError {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Serde(#[from] serde_json::Error),
    #[error(transparent)]
    Ldtk(#[from] ldtk::ParseError),
}
pub type LdtkProject = Project;
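// Usage sketch: looking up the levels of a named world on a parsed project.
// "Overworld" is a hypothetical world identifier.
#[allow(dead_code)]
fn example_world_levels(project: &LdtkProject) -> usize {
    project.get_world_levels("Overworld").len()
}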
#[cfg(feature = "autotile")]
mod autotile_support {
use micro_autotile::{AutoRuleSet, AutoTileRule, TileMatcher, TileOutput, TileStatus};
use crate::ldtk::{AutoLayerRuleDefinition, AutoLayerRuleGroup, Project};
#[cfg(feature = "_optional_tile_list")]
fn create_output(rule: &AutoLayerRuleDefinition) -> TileOutput {
TileOutput::Random(
rule.tile_rects_ids
.iter()
.flatten()
.collect(),
)
}
#[cfg(not(feature = "_optional_tile_list"))]
fn create_output(rule: &AutoLayerRuleDefinition) -> TileOutput {
TileOutput::Random(rule.tile_ids.iter().map(|val| *val as usize).collect())
}
    impl From<&AutoLayerRuleGroup> for AutoRuleSet {
        fn from(value: &AutoLayerRuleGroup) -> Self {
            // Convert every rule in the group: build a matcher from the raw
            // LDtk pattern (1x1 and larger grids alike) and attach the rule's
            // spawn chance and tile output. Rules whose pattern cannot be
            // converted are skipped.
            let set = value
                .rules
                .iter()
                .filter_map(|rule| {
                    TileMatcher::try_from(rule.pattern.as_slice())
                        .ok()
                        .map(|matcher| AutoTileRule {
                            matcher,
                            chance: rule.chance as f32,
                            output: create_output(rule),
                        })
                })
                .collect();
            AutoRuleSet(set)
        }
    }
    impl From<&Project> for AutoRuleSet {
        fn from(value: &Project) -> Self {
            let mut base_set = AutoRuleSet::default();
            for layers in value.defs.layers.iter() {
                for rule_group in layers.auto_rule_groups.iter() {
                    base_set = base_set + rule_group.into();
                }
            }
            base_set
        }
    }
}
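// Usage sketch: the conversion above lets a parsed project be turned directly
// into a combined auto-tiling rule set.
#[cfg(feature = "autotile")]
#[allow(dead_code)]
fn example_rule_set(project: &Project) -> micro_autotile::AutoRuleSet {
    micro_autotile::AutoRuleSet::from(project)
}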
#[cfg(test)]
const PROJECT_DATA: &[u8] = include_bytes!("./test_data/ver_1_2_5.ldtk");

#[test]
fn parses_bundled_project() {
    let project = Project::from_bytes(PROJECT_DATA).expect("Failed to parse project file");
    for layer in project.defs.layers.iter() {
        for _auto_rule_group in layer.auto_rule_groups.iter() {}
    }
}