From f045140f3d04d57e6c8fe6dad376f7474a7ca8ee Mon Sep 17 00:00:00 2001 From: Taiki Endo Date: Tue, 30 Jan 2024 20:32:05 +0900 Subject: [PATCH] Support COLLADA materials --- .github/.cspell/project-dictionary.txt | 12 + examples/kiss3d/src/main.rs | 2 +- src/collada/effect.rs | 558 +++++++++++++++++++++++++ src/collada/geometry.rs | 145 ++++--- src/collada/image.rs | 149 +++++++ src/collada/instance.rs | 357 ++++++++++++---- src/collada/iter.rs | 269 ++++-------- src/collada/material.rs | 108 +++++ src/collada/mod.rs | 458 +++++++++++++------- src/collada/scene.rs | 418 ++++++++++++++++++ src/common.rs | 33 +- src/loader.rs | 4 +- src/obj/mod.rs | 66 ++- src/utils/hex.rs | 194 +++++++++ src/utils/mod.rs | 2 + src/utils/xml.rs | 23 +- tests/assimp.rs | 541 +++++++++++------------- 17 files changed, 2537 insertions(+), 802 deletions(-) create mode 100644 src/collada/effect.rs create mode 100644 src/collada/image.rs create mode 100644 src/collada/material.rs create mode 100644 src/collada/scene.rs create mode 100644 src/utils/hex.rs diff --git a/.github/.cspell/project-dictionary.txt b/.github/.cspell/project-dictionary.txt index 8eabd54..a1ada76 100644 --- a/.github/.cspell/project-dictionary.txt +++ b/.github/.cspell/project-dictionary.txt @@ -1,6 +1,7 @@ anims binormal bitangent +blinn brep bytecount clearcoat @@ -13,21 +14,31 @@ emin endfacet endloop endsolid +FCOLLADA +GLES +GLSL gltf +GOOGLEEARTH +Gouraud idents IDREF illum instancenodes kwxport Lemire +lightmap linestrips +lookat memchr memrchr mmap mtllib nalgebra newmtl +newparam NMTOKEN +OKINO +phong polylist powerset regr @@ -53,4 +64,5 @@ vcolors vcount vertexcolors wasi +wireframe xmlspecialchars diff --git a/examples/kiss3d/src/main.rs b/examples/kiss3d/src/main.rs index 485f951..aa0e09d 100644 --- a/examples/kiss3d/src/main.rs +++ b/examples/kiss3d/src/main.rs @@ -9,7 +9,7 @@ use kiss3d::{light::Light, nalgebra as na, scene::SceneNode, window::Window}; use lexopt::prelude::*; use na::{Translation3, UnitQuaternion, Vector3}; -const DEFAULT_SCALE: f32 = 0.1; +const DEFAULT_SCALE: f32 = 1.; #[derive(Debug)] struct Args { diff --git a/src/collada/effect.rs b/src/collada/effect.rs new file mode 100644 index 0000000..67150bd --- /dev/null +++ b/src/collada/effect.rs @@ -0,0 +1,558 @@ +use super::*; + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=277 +#[derive(Default)] +pub(super) struct LibraryEffects<'a> { + // /// The unique identifier of this element. + // pub(super) id: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) effects: HashMap<&'a str, Effect<'a>>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=265 +pub(super) struct Effect<'a> { + /// The unique identifier of this element. + pub(super) id: &'a str, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) profile: ProfileCommon<'a>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=301 +pub(super) struct ProfileCommon<'a> { + // /// The unique identifier of this element. 
+ // pub(super) id: Option<&'a str>, + pub(super) surfaces: HashMap<&'a str, Surface<'a>>, + pub(super) samplers: HashMap<&'a str, Sampler<'a>>, + + pub(super) technique: Technique<'a>, +} + +pub(super) struct Technique<'a> { + #[allow(dead_code)] // TODO + pub(super) ty: ShadeType, + + // Colors/Textures + pub(super) emission: ColorAndTexture<'a>, + pub(super) ambient: ColorAndTexture<'a>, + pub(super) diffuse: ColorAndTexture<'a>, + pub(super) specular: ColorAndTexture<'a>, + pub(super) reflective: ColorAndTexture<'a>, + pub(super) transparent: ColorAndTexture<'a>, + pub(super) has_transparency: bool, + pub(super) rgb_transparency: bool, + pub(super) invert_transparency: bool, + + pub(super) shininess: f32, + pub(super) reflectivity: f32, + pub(super) transparency: f32, + pub(super) index_of_refraction: f32, + + // GOOGLEEARTH/OKINO extensions + pub(super) double_sided: bool, + + // FCOLLADA extensions + pub(super) bump: Texture<'a>, + + // MAX3D extensions + pub(super) wireframe: bool, + pub(super) faceted: bool, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=332 +pub(super) struct Surface<'a> { + pub(super) init_from: Uri<'a, Image<'a>>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4] https://www.khronos.org/files/collada_spec_1_4.pdf#page=312 +pub(super) struct Sampler<'a> { + // An xs:NCName, which is the sid of a . A + // is a definition of how a shader will resolve a + // color out of a . identifies the + // to read. + pub(super) source: &'a str, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(super) enum ShadeType { + Constant, + Lambert, + Phong, + Blinn, +} + +pub(super) struct ColorAndTexture<'a> { + pub(super) color: Color4, + pub(super) texture: Texture<'a>, +} + +impl ColorAndTexture<'_> { + fn new(color: Color4) -> Self { + Self { + color, + texture: Texture { + texture: "", + // texcoord: "", + }, + } + } +} + +pub(super) struct Texture<'a> { + pub(super) texture: &'a str, + // pub(super) texcoord: &'a str, +} + +// ============================================================================= +// Parsing + +pub(super) fn parse_library_effects<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, +) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "library_effects"); + // cx.library_effects.id = node.attribute("id"); + // cx.library_effects.name = node.attribute("name"); + + for child in node.element_children() { + match child.tag_name().name() { + "effect" => { + let effect = parse_effect(cx, child)?; + cx.library_effects.effects.insert(effect.id, effect); + } + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + // The specification says has 1 or more elements, + // but some exporters write empty tags. 
+ + Ok(()) +} + +/* +The `` element + +Attributes: +- `id` (xs:ID, Required) +- `name` (xs:token, Optional) + +Child elements must appear in the following order if present: +- `` (0 or 1) +- `` (0 or more) +- `` (0 or more) +- profile (1 or more) + At least one profile must appear, but any number of any of + the following profiles can be included: + - + - + - + - + - + - +- `` (0 or more) +*/ +fn parse_effect<'a>(cx: &mut Context<'a>, node: xml::Node<'a, '_>) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "effect"); + let id = node.required_attribute("id")?; + let mut profile = None; + + for child in node.element_children() { + if child.tag_name().name() == "profile_COMMON" { + profile = Some(parse_profile_common(cx, child)?); + } + } + + let profile = match profile { + Some(profile) => profile, + None => return Err(error::exactly_one_elem(node, "profile_COMMON")), + }; + + Ok(Effect { + id, + // name: node.attribute("name"), + profile, + }) +} + +/* +The `` element + +Attributes: +- `id` (xs:ID, Optional) + +Child elements must appear in the following order if present: +- `` (0 or 1) +- `` (0 or more) +- `` (1) +- `` (0 or more) + +Child Elements for `` / `` +Child elements must appear in the following order if present: +- `` (0 or 1) +- shader_element (0 or more) + One of `` (FX), ``, ``, or ``. +- `` (0 or more) +*/ +fn parse_profile_common<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, +) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "profile_COMMON"); + let mut surfaces = HashMap::new(); + let mut samplers = HashMap::new(); + let mut technique = None; + + for child in node.element_children() { + match child.tag_name().name() { + "newparam" => { + parse_newparam(cx, child, &mut surfaces, &mut samplers)?; + } + "technique" => { + for t in child.element_children() { + let name = t.tag_name().name(); + match name { + "constant" | "lambert" | "phong" | "blinn" => { + technique = Some(parse_technique(t, name.parse().unwrap())?); + } + "asset" | "extra" => { /* skip */ } + _ => {} + } + } + } + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + let technique = match technique { + Some(technique) => technique, + // TODO: technique maybe flatten? 
+ None => return Err(error::exactly_one_elem(node, "technique")), + }; + + Ok(ProfileCommon { + // id: node.attribute("id"), + surfaces, + samplers, + technique, + }) +} + +fn parse_newparam<'a>( + _cx: &mut Context<'a>, + node: xml::Node<'a, '_>, + surfaces: &mut HashMap<&'a str, Surface<'a>>, + samplers: &mut HashMap<&'a str, Sampler<'a>>, +) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "newparam"); + let sid = node.required_attribute("sid")?; + + for child in node.element_children() { + match child.tag_name().name() { + "surface" => { + // image ID given inside tags + if let Some(init) = child.child("init_from") { + surfaces.insert( + sid, + Surface { + init_from: Uri::from_id(init.trimmed_text()), + }, + ); + } + } + "sampler2D" => { + // surface ID is given inside tags + if let Some(source) = child.child("source") { + samplers.insert( + sid, + Sampler { + source: source.trimmed_text(), + }, + ); + } + } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + Ok(()) +} + +impl FromStr for ShadeType { + type Err = io::Error; + + fn from_str(s: &str) -> Result { + Ok(match s { + "constant" => Self::Constant, + "lambert" => Self::Lambert, + "phong" => Self::Phong, + "blinn" => Self::Blinn, + _ => bail!("unknown shade type {:?}", s), + }) + } +} + +/* +Child elements must appear in the following order if present: +- (0 or 1, fx_common_color_or_texture_type) +- (FX) (0 or 1, fx_common_color_or_texture_type) +- (0 or 1, fx_common_color_or_texture_type) +- (0 or 1, fx_common_color_or_texture_type) +- (0 or 1, fx_common_float_or_param_type) +- (0 or 1, fx_common_color_or_texture_type) +- (0 or 1, fx_common_float_or_param_type 0.0 ..= 1.0) +- (0 or 1, fx_common_color_or_texture_type) +- (0 or 1, fx_common_float_or_param_type 0.0 ..= 1.0) +- (0 or 1, fx_common_float_or_param_type) +*/ +fn parse_technique<'a>(node: xml::Node<'a, '_>, ty: ShadeType) -> io::Result> { + debug_assert_eq!(node.tag_name().name().parse::().unwrap(), ty); + let mut effect = Technique::new(ty); + + for child in node.element_children() { + let name = child.tag_name().name(); + match name { + // fx_common_color_or_texture_type + "emission" => { + parse_effect_color( + child, + &mut effect.emission.color, + &mut effect.emission.texture, + )?; + } + "ambient" => { + parse_effect_color( + child, + &mut effect.ambient.color, + &mut effect.ambient.texture, + )?; + } + "diffuse" => { + parse_effect_color( + child, + &mut effect.diffuse.color, + &mut effect.diffuse.texture, + )?; + } + "specular" => { + parse_effect_color( + child, + &mut effect.specular.color, + &mut effect.specular.texture, + )?; + } + "reflective" => { + parse_effect_color( + child, + &mut effect.reflective.color, + &mut effect.reflective.texture, + )?; + } + "transparent" => { + effect.has_transparency = true; + if let Some(opaque) = child.parse_attribute::("opaque")? { + effect.rgb_transparency = opaque.rgb_transparency(); + effect.invert_transparency = opaque.invert_transparency(); + } + parse_effect_color( + child, + &mut effect.transparent.color, + &mut effect.transparent.texture, + )?; + } + + // fx_common_float_or_param_type + "shininess" => { + if let Some(n) = parse_effect_float(child)? { + effect.shininess = n; + } + } + "reflectivity" => { + if let Some(n) = parse_effect_float(child)? { + effect.reflectivity = n; + } + } + "transparency" => { + if let Some(n) = parse_effect_float(child)? { + effect.transparency = n; + } + } + "index_of_refraction" => { + if let Some(n) = parse_effect_float(child)? 
{ + effect.index_of_refraction = n; + } + } + + // GOOGLEEARTH/OKINO extensions + "double_sided" => { + effect.double_sided = node.parse_required_attribute(name)?; + } + + // FCOLLADA extensions + "bump" => { + let mut dummy = [0.; 4]; + parse_effect_color(child, &mut dummy, &mut effect.bump)?; + } + + // MAX3D extensions + "wireframe" => { + effect.wireframe = node.parse_required_attribute(name)?; + } + "faceted" => { + effect.faceted = node.parse_required_attribute(name)?; + } + + _ => {} + } + } + + Ok(effect) +} + +impl Technique<'_> { + fn new(ty: ShadeType) -> Self { + Self { + ty, + emission: ColorAndTexture::new([0.0, 0.0, 0.0, 1.0]), + ambient: ColorAndTexture::new([0.1, 0.1, 0.1, 1.0]), + diffuse: ColorAndTexture::new([0.6, 0.6, 0.6, 1.0]), + specular: ColorAndTexture::new([0.4, 0.4, 0.4, 1.0]), + // refs: https://www.khronos.org/files/collada_spec_1_5.pdf#page=250 + transparent: ColorAndTexture::new([1.0, 1.0, 1.0, 1.0]), + reflective: ColorAndTexture::new([0.0, 0.0, 0.0, 1.0]), + shininess: 10.0, + index_of_refraction: 1.0, + reflectivity: 0.0, + // refs: https://www.khronos.org/files/collada_spec_1_5.pdf#page=250 + transparency: 1.0, + has_transparency: false, + rgb_transparency: false, + invert_transparency: false, + double_sided: false, + bump: Texture { + texture: "", + // texcoord: "", + }, + wireframe: false, + faceted: false, + } + } +} + +// Attributes: +// Only has an attribute +// - `opaque` (Enumeration, Optional) +// +// Child Elements: +// Note: Exactly one of the child elements ``, ``, or +// `` must appear. They are mutually exclusive. +// - `` +// - `` (reference) +// - `` +// +// See also fx_common_color_or_texture_type in specification. +fn parse_effect_color<'a>( + node: xml::Node<'a, '_>, + color: &mut Color4, + texture: &mut Texture<'a>, +) -> io::Result<()> { + for child in node.element_children() { + match child.tag_name().name() { + "color" => { + let content = xml::comma_to_period(child.trimmed_text()); + let mut iter = xml::parse_float_array_exact(&content, 4); + + let r = iter.next().unwrap()?; + let g = iter.next().unwrap()?; + let b = iter.next().unwrap()?; + let a = iter.next().unwrap()?; + *color = [r, g, b, a]; + } + "texture" => { + let _texcoord = child.required_attribute("texcoord")?; + *texture = Texture { + texture: child.required_attribute("texture")?, + // texcoord, + }; + } + "param" => warn::unsupported_child_elem(child), + _ => {} + } + } + Ok(()) +} + +fn parse_effect_float(node: xml::Node<'_, '_>) -> io::Result> { + let mut float = None; + + for child in node.element_children() { + match child.tag_name().name() { + "float" => { + let content = xml::comma_to_period(child.trimmed_text()); + float = Some( + float::parse(content.as_bytes()) + .ok_or_else(|| format_err!("error while parsing a float"))?, + ); + } + "param" => warn::unsupported_child_elem(child), + _ => return Err(error::unexpected_child_elem(child)), + } + } + + Ok(float) +} + +#[allow(non_camel_case_types)] +#[derive(Clone, Copy, PartialEq, Eq)] +enum Opaque { + A_ZERO, + A_ONE, + RGB_ZERO, + RGB_ONE, +} + +impl Opaque { + fn rgb_transparency(self) -> bool { + matches!(self, Self::RGB_ZERO | Self::RGB_ONE) + } + fn invert_transparency(self) -> bool { + matches!(self, Self::RGB_ZERO | Self::A_ZERO) + } +} + +impl FromStr for Opaque { + type Err = io::Error; + + fn from_str(s: &str) -> Result { + Ok(match s { + "A_ZERO" => Self::A_ZERO, + "A_ONE" => Self::A_ONE, + "RGB_ZERO" => Self::RGB_ZERO, + "RGB_ONE" => Self::RGB_ONE, + _ => bail!("unknown shade type {:?}", s), + }) + } 
+} diff --git a/src/collada/geometry.rs b/src/collada/geometry.rs index 957909b..445321c 100644 --- a/src/collada/geometry.rs +++ b/src/collada/geometry.rs @@ -1,53 +1,58 @@ use super::*; -/// See [specification][spec] for details. +/// The `` element. /// -/// [spec]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=99 +/// See the [specification][1.4] for details. +/// +/// [1.4] https://www.khronos.org/files/collada_spec_1_4.pdf#page=99 #[derive(Default)] -pub(super) struct LibraryGeometries { - /// The unique identifier of this element. - pub(super) id: Option, - /// The name of this element. - pub(super) name: Option, - - pub(super) geometries: BTreeMap, - - pub(super) accessors: HashMap, - pub(super) array_data: HashMap, +pub(super) struct LibraryGeometries<'a> { + // /// The unique identifier of this element. + // pub(super) id: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) geometries: BTreeMap<&'a str, Geometry<'a>>, + + pub(super) accessors: HashMap<&'a str, Accessor<'a>>, + pub(super) array_data: HashMap<&'a str, ArrayData<'a>>, } -/// See [specification][spec] for details. +/// The `` element. +/// +/// See the [specification][1.4] for details. /// -/// [spec]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=68 -pub(super) struct Geometry { +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=68 +pub(super) struct Geometry<'a> { /// The unique identifier of this element. - pub(super) id: String, - /// The name of this element. - #[allow(dead_code)] // TODO - pub(super) name: Option, - - pub(super) mesh: Mesh, -} - -pub(super) struct Mesh { - pub(super) vertices: Vertices, - pub(super) primitives: Vec, + pub(super) id: &'a str, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) mesh: Mesh<'a>, } -pub(super) struct VerticesInputs { - pub(super) position: UnsharedInput, - pub(super) normal: Option, - pub(super) texcoord: Option, +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=112 +pub(super) struct Mesh<'a> { + pub(super) vertices: Vertices<'a>, + pub(super) primitives: Vec>, } -pub(super) struct Vertices { +pub(super) struct Vertices<'a> { /// The unique identifier of this element. - pub(super) id: String, - /// The name of this element. - #[allow(dead_code)] // TODO - pub(super) name: Option, + pub(super) id: &'a str, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) input: VerticesInputs<'a>, +} - pub(super) input: VerticesInputs, +pub(super) struct VerticesInputs<'a> { + pub(super) position: UnsharedInput<'a>, + pub(super) normal: Option>, + pub(super) texcoord: Option>, + pub(super) color: Option>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -82,29 +87,27 @@ impl PrimitiveType { } } -pub(super) struct PrimitiveInputs { - pub(super) vertex: SharedInput, - pub(super) normal: Option, - #[allow(dead_code)] // TODO(material) - pub(super) color: Option, - pub(super) texcoord: Vec, +pub(super) struct PrimitiveInputs<'a> { + pub(super) vertex: SharedInput<'a, Vertices<'a>>, + pub(super) normal: Option>, + pub(super) color: Option>, + pub(super) texcoord: Vec>, } -pub(super) struct Primitive { +pub(super) struct Primitive<'a> { /// The type of this element. pub(super) ty: PrimitiveType, - /// The name of this element. - #[allow(dead_code)] // TODO - pub(super) name: Option, + // /// The name of this element. 
+ // pub(super) name: Option<&'a str>, /// The number of primitives. pub(super) count: u32, /// A symbol for a material. - #[allow(dead_code)] // TODO(material) - pub(super) material: Option, + #[allow(dead_code)] // TODO + pub(super) material: Option<&'a str>, /// Declares the input semantics of a data source and connects a consumer to that source. - pub(super) input: Option, + pub(super) input: Option>, /// The number of vertices for one polygon. /// /// Only [polylist] actually have a vcount element, but we use this field to @@ -136,13 +139,13 @@ pub(super) struct Primitive { // ----------------------------------------------------------------------------- // Parsing -pub(super) fn parse_library_geometries( - cx: &mut Context, - node: xml::Node<'_, '_>, +pub(super) fn parse_library_geometries<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, ) -> io::Result<()> { debug_assert_eq!(node.tag_name().name(), "library_geometries"); - cx.library_geometries.id = node.attribute("id").map(Into::into); - cx.library_geometries.name = node.attribute("name").map(Into::into); + // cx.library_geometries.id = node.attribute("id"); + // cx.library_geometries.name = node.attribute("name"); for node in node.element_children() { match node.tag_name().name() { @@ -150,7 +153,7 @@ pub(super) fn parse_library_geometries( if let Some(geometry) = parse_geometry(cx, node)? { cx.library_geometries .geometries - .insert(geometry.id.clone(), geometry); + .insert(geometry.id, geometry); } } "asset" | "extra" => { /* skip */ } @@ -165,7 +168,10 @@ pub(super) fn parse_library_geometries( Ok(()) } -fn parse_geometry(cx: &mut Context, node: xml::Node<'_, '_>) -> io::Result> { +fn parse_geometry<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, +) -> io::Result>> { debug_assert_eq!(node.tag_name().name(), "geometry"); // The specification say it is optional, but it is actually required. 
let id = node.required_attribute("id")?; @@ -191,13 +197,13 @@ fn parse_geometry(cx: &mut Context, node: xml::Node<'_, '_>) -> io::Result) -> io::Result { +fn parse_mesh<'a>(cx: &mut Context<'a>, node: xml::Node<'a, '_>) -> io::Result> { debug_assert_eq!(node.tag_name().name(), "mesh"); let mut primitives = vec![]; let mut has_source = false; @@ -242,13 +248,14 @@ fn parse_mesh(cx: &mut Context, node: xml::Node<'_, '_>) -> io::Result { }) } -fn parse_vertices(node: xml::Node<'_, '_>) -> io::Result { +fn parse_vertices<'a>(node: xml::Node<'a, '_>) -> io::Result> { debug_assert_eq!(node.tag_name().name(), "vertices"); let id = node.required_attribute("id")?; let mut input_position = None; let mut input_normal = None; let mut input_texcoord = None; + let mut input_color = None; for node in node.element_children() { match node.tag_name().name() { @@ -258,6 +265,7 @@ fn parse_vertices(node: xml::Node<'_, '_>) -> io::Result { InputSemantic::POSITION => input_position = Some(i), InputSemantic::NORMAL => input_normal = Some(i), InputSemantic::TEXCOORD => input_texcoord = Some(i), + InputSemantic::COLOR => input_color = Some(i), _semantic => { // warn!( // "unsupported semantic {:?} in ({})", @@ -279,12 +287,13 @@ fn parse_vertices(node: xml::Node<'_, '_>) -> io::Result { }; Ok(Vertices { - id: id.into(), - name: node.attribute("name").map(Into::into), + id, + // name: node.attribute("name"), input: VerticesInputs { position: input_position, normal: input_normal, texcoord: input_texcoord, + color: input_color, }, }) } @@ -306,7 +315,7 @@ impl FromStr for PrimitiveType { } } -fn parse_primitive(node: xml::Node<'_, '_>, ty: PrimitiveType) -> io::Result { +fn parse_primitive<'a>(node: xml::Node<'a, '_>, ty: PrimitiveType) -> io::Result> { debug_assert_eq!(node.tag_name().name().parse::().unwrap(), ty); let count: u32 = node.parse_required_attribute("count")?; let mut vcount = vec![]; @@ -362,7 +371,7 @@ fn parse_primitive(node: xml::Node<'_, '_>, ty: PrimitiveType) -> io::Result(content); for _ in 0..count { let value = iter.next().ok_or_else(|| { @@ -419,7 +428,7 @@ fn parse_primitive(node: xml::Node<'_, '_>, ty: PrimitiveType) -> io::Result, ty: PrimitiveType) -> io::Result, ty: PrimitiveType) -> io::Result` element. +/// +/// See the specifications ([1.4], [1.5]) for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=278 +/// [1.5]: https://www.khronos.org/files/collada_spec_1_5.pdf#page=327 +#[derive(Default)] +pub(super) struct LibraryImages<'a> { + // /// The unique identifier of this element. + // pub(super) id: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) images: HashMap<&'a str, Image<'a>>, +} + +/// The `` element. +/// +/// See the specifications ([1.4], [1.5]) for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=268 +/// [1.5]: https://www.khronos.org/files/collada_spec_1_5.pdf#page=310 +pub(super) struct Image<'a> { + /// The unique identifier of this element. + pub(super) id: &'a str, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + // /// The image format. + // pub(super) format: Option<&'a str>, + // /// The height of the image in pixels. + // pub(super) height: Option, + // /// The width of the image in pixels. + // pub(super) width: Option, + // /// The depth of the image in pixels. A 2-D image has a depth of 1, which is the default. + // pub(super) depth: u32, + /// An embedded image data or an external image file. 
+ pub(super) source: ImageSource<'a>, +} + +/// An embedded image data or an external image file. +pub(super) enum ImageSource<'a> { + /// An embedded image data. + Data(Vec), + /// An external image file. + InitFrom(&'a str), + Skip, +} + +// ============================================================================= +// Parsing + +pub(super) fn parse_library_images<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, +) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "library_images"); + // cx.library_images.id = node.attribute("id"); + // cx.library_images.name = node.attribute("name"); + + for node in node.element_children() { + match node.tag_name().name() { + "image" => { + let image = parse_image(cx, node)?; + cx.library_images.images.insert(image.id, image); + } + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(node)), + } + } + + // The specification says has 1 or more elements, + // but some exporters write empty tags. + + Ok(()) +} + +fn parse_image<'a>(cx: &mut Context<'a>, node: xml::Node<'a, '_>) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "image"); + let id = node.required_attribute("id")?; + // let name = node.attribute("name"); + let is_1_4 = cx.version.is_1_4(); + if is_1_4 { + // let mut format = node.attribute("format"); + let _height: Option = node.parse_attribute("height")?; + let _width: Option = node.parse_attribute("width")?; + let _depth: u32 = node.parse_attribute("depth")?.unwrap_or(1); + } else { + // let sid = node.attribute("sid"); + } + let mut source = None; + + for node in node.element_children() { + let tag_name = node.tag_name().name(); + match tag_name { + "init_from" => { + if is_1_4 { + source = Some(ImageSource::InitFrom(node.trimmed_text())); + continue; + } + for node in node.element_children() { + match node.tag_name().name() { + "ref" => { + source = Some(ImageSource::InitFrom(node.trimmed_text())); + } + "hex" => { + // format = node.attribute("format"); + let data = hex::decode(node.trimmed_text().as_bytes())?; + source = Some(ImageSource::Data(data)); + } + _ => {} + } + } + } + "data" if is_1_4 => { + let data = hex::decode(node.trimmed_text().as_bytes())?; + source = Some(ImageSource::Data(data)); + } + "asset" | "extra" => { /* skip */ } + _ if is_1_4 => return Err(error::unexpected_child_elem(node)), + _ => {} + } + } + + let source = match source { + Some(source) => source, + None => { + if is_1_4 { + bail!( + "<{}> element must be contain or element ({})", + node.tag_name().name(), + node.node_location() + ) + } + // 1.5 has elements, but many applications ignore them. 
+ ImageSource::Skip + } + }; + + Ok(Image { + id, + // name, + // format, + // height, + // width, + // depth, + source, + }) +} diff --git a/src/collada/instance.rs b/src/collada/instance.rs index f7255cb..0d40177 100644 --- a/src/collada/instance.rs +++ b/src/collada/instance.rs @@ -1,88 +1,301 @@ +use std::path::{Path, PathBuf}; + use super::*; -use crate::common; +use crate::{common, ShadingModel}; -pub(super) fn build_meshes(doc: &Document) -> Vec { +pub(super) fn build(doc: &Document<'_>, dir: Option<&Path>) -> common::Scene { let mut meshes = Vec::with_capacity(doc.library_geometries.geometries.len()); + let mut materials = Vec::with_capacity(doc.library_geometries.geometries.len()); + let mut instance_geometry_map = HashMap::<&str, Vec<_>>::new(); + + for node in &doc.library_visual_scenes.nodes { + for instance_geometry in &node.instance_geometry { + let e = instance_geometry_map.entry(instance_geometry.url.as_str()); + e.or_default().push(instance_geometry); + } + } + for geometry in doc.library_geometries.geometries.values() { + meshes.push(build_mesh(doc, geometry)); + materials.push(build_material(geometry, doc, &instance_geometry_map, dir)); + } + + common::Scene { materials, meshes } +} - for mesh_ref in doc.meshes() { - let mut mesh = common::Mesh { - name: mesh_ref.xml.id.clone(), - ..common::Mesh::default() - }; - - for prim in mesh_ref.primitives() { - #[allow(clippy::cast_possible_truncation)] - let prev_positions_len = mesh.vertices.len() as u32; - let p: Vec<_> = prim.positions().collect(); - let n: Vec<_> = prim.normals().collect(); - let t: Vec<_> = prim.texcoords(0).collect(); - let positions_idx = prim.vertex_indices(); - let mut normal_idx = prim.normal_indices(); - let mut texcoord_idx = prim.texcoord_indices(0); - let mut idx = 0; - - for i in positions_idx { - if let iter::Face::Triangle(vertex) = i { - mesh.vertices.push([ - p[vertex[0] as usize][0], - p[vertex[0] as usize][1], - p[vertex[0] as usize][2], +fn build_mesh(doc: &Document<'_>, geometry: &Geometry<'_>) -> common::Mesh { + let mut mesh = common::Mesh { + name: geometry.id.to_owned(), + ..Default::default() + }; + + for prim in (iter::Mesh { doc, xml: geometry }).primitives() { + #[allow(clippy::cast_possible_truncation)] + let prev_positions_len = mesh.vertices.len() as u32; + let p: Vec<_> = prim.positions().collect(); + let n: Vec<_> = prim.normals().collect(); + let t: Vec<_> = prim.texcoords(0).collect(); + let c: Vec<_> = prim.colors().collect(); + let positions_indices = prim.vertex_indices(); + let mut normal_indices = prim.normal_indices(); + let mut texcoord_indices = prim.texcoord_indices(0); + let mut color_indices = prim.color_indices(); + let mut idx = 0; + + for vertex in positions_indices { + mesh.vertices.push([ + p[vertex[0] as usize][0] * doc.asset.unit, + p[vertex[0] as usize][1] * doc.asset.unit, + p[vertex[0] as usize][2] * doc.asset.unit, + ]); + mesh.vertices.push([ + p[vertex[1] as usize][0] * doc.asset.unit, + p[vertex[1] as usize][1] * doc.asset.unit, + p[vertex[1] as usize][2] * doc.asset.unit, + ]); + mesh.vertices.push([ + p[vertex[2] as usize][0] * doc.asset.unit, + p[vertex[2] as usize][1] * doc.asset.unit, + p[vertex[2] as usize][2] * doc.asset.unit, + ]); + if !n.is_empty() { + if let Some(normal) = normal_indices.next() { + mesh.normals.push([ + n[normal[0] as usize][0], + n[normal[0] as usize][1], + n[normal[0] as usize][2], ]); - mesh.vertices.push([ - p[vertex[1] as usize][0], - p[vertex[1] as usize][1], - p[vertex[1] as usize][2], + mesh.normals.push([ + 
n[normal[1] as usize][0], + n[normal[1] as usize][1], + n[normal[1] as usize][2], ]); - mesh.vertices.push([ - p[vertex[2] as usize][0], - p[vertex[2] as usize][1], - p[vertex[2] as usize][2], + mesh.normals.push([ + n[normal[2] as usize][0], + n[normal[2] as usize][1], + n[normal[2] as usize][2], ]); - if !n.is_empty() { - if let Some(iter::Face::Triangle(normal)) = normal_idx.next() { - mesh.normals.push([ - n[normal[0] as usize][0], - n[normal[0] as usize][1], - n[normal[0] as usize][2], - ]); - mesh.normals.push([ - n[normal[1] as usize][0], - n[normal[1] as usize][1], - n[normal[1] as usize][2], - ]); - mesh.normals.push([ - n[normal[2] as usize][0], - n[normal[2] as usize][1], - n[normal[2] as usize][2], - ]); - } else { - panic!() + } else { + panic!() + } + } + if !t.is_empty() { + if let Some(texcoord) = texcoord_indices.next() { + mesh.texcoords[0] + .push([t[texcoord[0] as usize][0], t[texcoord[0] as usize][1]]); + mesh.texcoords[0] + .push([t[texcoord[1] as usize][0], t[texcoord[1] as usize][1]]); + mesh.texcoords[0] + .push([t[texcoord[2] as usize][0], t[texcoord[2] as usize][1]]); + } else { + panic!() + } + } + if !c.is_empty() { + if let Some(rgb) = color_indices.next() { + mesh.colors[0].push([ + c[rgb[0] as usize][0], + c[rgb[0] as usize][1], + c[rgb[0] as usize][2], + 1., + ]); + mesh.colors[0].push([ + c[rgb[1] as usize][0], + c[rgb[1] as usize][1], + c[rgb[1] as usize][2], + 1., + ]); + mesh.colors[0].push([ + c[rgb[2] as usize][0], + c[rgb[2] as usize][1], + c[rgb[2] as usize][2], + 1., + ]); + } else { + panic!() + } + } + mesh.faces.push([ + prev_positions_len + idx, + prev_positions_len + (idx + 1), + prev_positions_len + (idx + 2), + ]); + idx += 3; + } + } + + mesh +} + +fn build_material( + geometry: &Geometry<'_>, + doc: &Document<'_>, + instance_geometry_map: &HashMap<&str, Vec<&InstanceGeometry<'_>>>, + dir: Option<&Path>, +) -> common::Material { + let mut mat = common::Material::default(); + if let Some(instance_geometry) = instance_geometry_map.get(geometry.id) { + for instance_geometry in instance_geometry { + for instance_material in instance_geometry.materials.values() { + fn texture( + doc: &Document<'_>, + effect: &Effect<'_>, + texture: &Texture<'_>, + dir: Option<&Path>, + ) -> Option { + if texture.texture.is_empty() { + return None; + } + let mut image = None; + if let Some(sampler) = effect.profile.samplers.get(texture.texture) { + if let Some(surface) = effect.profile.surfaces.get(sampler.source) { + if let Some(i) = doc.get(&surface.init_from) { + image = Some(i); + } } } - if !t.is_empty() { - if let Some(iter::Face::Triangle(texcoord)) = texcoord_idx.next() { - mesh.texcoords[0] - .push([t[texcoord[0] as usize][0], t[texcoord[0] as usize][1]]); - mesh.texcoords[0] - .push([t[texcoord[1] as usize][0], t[texcoord[1] as usize][1]]); - mesh.texcoords[0] - .push([t[texcoord[2] as usize][0], t[texcoord[2] as usize][1]]); - } else { - panic!() + if image.is_none() { + if let Some(i) = doc.library_images.images.get(&texture.texture) { + image = Some(i); } } - mesh.faces.push([ - prev_positions_len + idx, - prev_positions_len + (idx + 1), - prev_positions_len + (idx + 2), - ]); - idx += 3; + if let Some(image) = image { + match &image.source { + ImageSource::Data(_data) => {} // TODO + ImageSource::InitFrom(mut p) => { + // There is an exporter writes empty tag + if p.is_empty() { + return None; + } + match dir { + Some(dir) => { + // TODO + p = p.strip_prefix("file://").unwrap_or(p); + let tmp: String; + if p.contains('\\') { + tmp = p.replace('\\', 
"/"); + p = &*tmp; + } + if p.starts_with("/..") { + p = p.strip_prefix('/').unwrap_or(p); + } + let p = dir.join(p); + if p.exists() { + return Some(p); + } + } + None => return Some(p.into()), + } + } + ImageSource::Skip => {} + } + } + None + } + + let effect = match doc.get(&instance_material.target) { + Some(material) => match doc.get(&material.instance_effect.url) { + Some(effect) => effect, + None => { + // debug!( + // "not found effect instance '{}'", + // material.instance_effect.url.as_str() + // ); + continue; + } + }, + None => { + // debug!( + // "not found material instance '{}'", + // instance_material.target.as_str() + // ); + continue; + } + }; + + mat.shading_model = match effect.profile.technique.ty { + _ if effect.profile.technique.faceted => Some(ShadingModel::Flat), + ShadeType::Constant => Some(ShadingModel::NoShading), + ShadeType::Lambert => Some(ShadingModel::Gouraud), + ShadeType::Blinn => Some(ShadingModel::Blinn), + ShadeType::Phong => Some(ShadingModel::Phong), + }; + + // mat.two_sided = Some(effect.profile.technique.double_sided); + // mat.wireframe = Some(effect.profile.technique.wireframe); + + mat.color.ambient = Some(effect.profile.technique.ambient.color); + mat.color.diffuse = Some(effect.profile.technique.diffuse.color); + mat.color.specular = Some(effect.profile.technique.specular.color); + mat.color.emissive = Some(effect.profile.technique.emission.color); + mat.color.reflective = Some(effect.profile.technique.reflective.color); + + mat.shininess = Some(effect.profile.technique.shininess); + mat.reflectivity = Some(effect.profile.technique.reflectivity); + mat.index_of_refraction = Some(effect.profile.technique.index_of_refraction); + + // Refs: https://github.com/assimp/assimp/blob/v5.3.1/code/AssetLib/Collada/ColladaLoader.cpp#L1588 + let mut transparency = effect.profile.technique.transparency; + let mut transparent = effect.profile.technique.transparent.color; + if transparency >= 0. && transparency <= 1. { + if effect.profile.technique.rgb_transparency { + transparency *= 0.212671 * transparent[0] + + 0.715160 * transparent[1] + + 0.072169 * transparent[2]; + transparent[3] = 1.; + mat.color.transparent = Some(transparent); + } else { + transparency *= transparent[3]; + } + if effect.profile.technique.invert_transparency { + transparency = 1. - transparency; + } + if effect.profile.technique.has_transparency || transparency < 1. 
{ + mat.opacity = Some(transparency); + } + } + + if let Some(p) = + texture(doc, effect, &effect.profile.technique.ambient.texture, dir) + { + // Refs: https://github.com/assimp/assimp/blob/v5.3.1/code/AssetLib/Collada/ColladaLoader.cpp#L1619 + mat.texture.lightmap = Some(p); + } + if let Some(p) = + texture(doc, effect, &effect.profile.technique.emission.texture, dir) + { + mat.texture.emissive = Some(p); + } + if let Some(p) = + texture(doc, effect, &effect.profile.technique.specular.texture, dir) + { + mat.texture.specular = Some(p); + } + if let Some(p) = + texture(doc, effect, &effect.profile.technique.diffuse.texture, dir) + { + mat.texture.diffuse = Some(p); + } + if let Some(p) = texture(doc, effect, &effect.profile.technique.bump, dir) { + mat.texture.normal = Some(p); + } + if let Some(p) = texture( + doc, + effect, + &effect.profile.technique.transparent.texture, + dir, + ) { + mat.texture.opacity = Some(p); + } + if let Some(p) = texture( + doc, + effect, + &effect.profile.technique.reflective.texture, + dir, + ) { + mat.texture.reflection = Some(p); } } } - - meshes.push(mesh); } - - meshes + mat } diff --git a/src/collada/iter.rs b/src/collada/iter.rs index 0a7ee23..4c2c681 100644 --- a/src/collada/iter.rs +++ b/src/collada/iter.rs @@ -4,45 +4,12 @@ use std::{ slice, }; -use crate::{collada as ast, Vec2, Vec3}; - -impl ast::Document { - pub(super) fn meshes(&self) -> Meshes<'_> { - Meshes { - iter: self.library_geometries.geometries.values().enumerate(), - doc: self, - } - } -} - -pub(super) struct Meshes<'a> { - pub(super) iter: - iter::Enumerate>, - pub(super) doc: &'a ast::Document, -} - -impl<'a> Iterator for Meshes<'a> { - type Item = Mesh<'a>; - - fn next(&mut self) -> Option { - self.iter - .next() - .map(|(_index, xml)| Mesh { doc: self.doc, xml }) - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl ExactSizeIterator for Meshes<'_> {} - -impl FusedIterator for Meshes<'_> {} +use crate::{collada as ast, Face, Vec2, Vec3}; #[derive(Clone)] pub(super) struct Mesh<'a> { - pub(super) doc: &'a ast::Document, - pub(super) xml: &'a ast::Geometry, + pub(super) doc: &'a ast::Document<'a>, + pub(super) xml: &'a ast::Geometry<'a>, } impl<'a> Mesh<'a> { @@ -56,7 +23,7 @@ impl<'a> Mesh<'a> { pub(super) struct Primitives<'a> { mesh: Mesh<'a>, - iter: iter::Enumerate>, + iter: iter::Enumerate>>, } impl<'a> Iterator for Primitives<'a> { @@ -75,13 +42,12 @@ impl<'a> Iterator for Primitives<'a> { } impl ExactSizeIterator for Primitives<'_> {} - impl FusedIterator for Primitives<'_> {} #[derive(Clone)] pub(super) struct Primitive<'a> { pub(super) mesh: Mesh<'a>, - pub(super) xml: &'a ast::Primitive, + pub(super) xml: &'a ast::Primitive<'a>, } impl<'a> Primitive<'a> { @@ -94,8 +60,12 @@ impl<'a> Primitive<'a> { let position = &self.mesh.xml.mesh.vertices.input.position; let acc = &self.mesh.doc[&position.source]; let data = self.mesh.doc[&acc.source].as_float().unwrap(); - // TODO: check param names are ["X", "Y", "Z"] and type is "float" - assert!(acc.stride >= 3); + // ["X", "Y", "Z"] + if acc.stride < 3 || acc.params.len() < 3 || acc.params.iter().any(|p| p.ty != "float") + { + // TODO: error? 
+ return Positions(None); + } assert!((acc.count * acc.stride) as usize <= data.len()); Positions(Some((acc.count, data.chunks(acc.stride as _)))) } else { @@ -123,8 +93,11 @@ impl<'a> Primitive<'a> { None => return Positions(None), }; let data = self.mesh.doc[&acc.source].as_float().unwrap(); - // TODO: check param names are ["X", "Y", "Z"] and type is "float" - assert!(acc.stride >= 3); + // ["X", "Y", "Z"] + if acc.stride < 3 || acc.params.len() < 3 || acc.params.iter().any(|p| p.ty != "float") { + // TODO: error? + return Positions(None); + } assert!((acc.count * acc.stride) as usize <= data.len()); Positions(Some((acc.count, data.chunks(acc.stride as _)))) } @@ -151,16 +124,48 @@ impl<'a> Primitive<'a> { None => return Texcoords(None), }; let data = self.mesh.doc[&acc.source].as_float().unwrap(); - // TODO: check param names are ["S", "T"] and type is "float" - assert!(acc.stride >= 2); + // ["S", "T"] or ["S", "T", "P"] + if acc.stride < 2 || acc.params.len() < 2 || acc.params.iter().any(|p| p.ty != "float") { + // TODO: error? + return Texcoords(None); + } assert!((acc.count * acc.stride) as usize <= data.len()); Texcoords(Some(TexcoordsInner { iter: data.chunks(acc.stride as _), })) } + pub(super) fn colors(&self) -> Colors<'a> { + let acc = match &self.xml.input { + Some(input) => match &input.color { + Some(color) => &self.mesh.doc[&color.source], + None => { + if self.mesh.xml.mesh.vertices.id == input.vertex.source { + match &self.mesh.xml.mesh.vertices.input.color { + Some(color) => &self.mesh.doc[&color.source], + None => return Positions(None), + } + } else { + // TODO: search other mesh's vertices + todo!() + } + } + }, + None => return Positions(None), + }; + let data = self.mesh.doc[&acc.source].as_float().unwrap(); + // ["R", "G", "B"] or ["R", "G", "B", "A"] + if acc.stride < 3 || acc.params.len() < 3 || acc.params.iter().any(|p| p.ty != "float") { + // TODO: error? 
+ return Positions(None); + } + assert!((acc.count * acc.stride) as usize <= data.len()); + Positions(Some((acc.count, data.chunks(acc.stride as _)))) + } + fn vertex_indices_inner(&self, offset: u32) -> IndicesInner<'a> { match self.xml.ty { + ast::PrimitiveType::Lines | ast::PrimitiveType::LineStrips => IndicesInner::Skip, ast::PrimitiveType::Polylist | ast::PrimitiveType::Polygons => IndicesInner::Polylist { offset, indices: &self.xml.p, @@ -183,56 +188,23 @@ impl<'a> Primitive<'a> { range: None, } } - ast::PrimitiveType::Lines => IndicesInner::Lines { - offset, - indices: self.xml.p.chunks(self.xml.stride as _), - }, - ast::PrimitiveType::LineStrips => IndicesInner::LineStrips { - offset, - indices: &self.xml.p, - stride: self.xml.stride, - index: 0, - vcount: self.xml.vcount.iter(), - range: None, - }, } } - fn vertex_indices_size(&self, min_face_size: u32) -> u32 { - debug_assert!((1..=3).contains(&min_face_size)); + fn vertex_indices_size(&self) -> u32 { match self.xml.ty { ast::PrimitiveType::Polylist | ast::PrimitiveType::Polygons => self .xml .vcount .iter() - .map(|count| { - if (min_face_size..=3).contains(count) { - 1 - } else if *count > 3 { - count - 2 - } else { - 0 - } - }) + .map(|count| if *count >= 3 { count - 2 } else { 0 }) .sum(), ast::PrimitiveType::Triangles => self.xml.count, ast::PrimitiveType::TriStrips | ast::PrimitiveType::TriFans => { self.xml.vcount.iter().map(|&count| count - 2).sum() } - ast::PrimitiveType::Lines => { - if min_face_size <= 2 { - self.xml.count - } else { - 0 - } - } - ast::PrimitiveType::LineStrips => { - if min_face_size <= 2 { - self.xml.vcount.iter().map(|&count| count - 1).sum() - } else { - 0 - } - } + ast::PrimitiveType::Lines => 0, + ast::PrimitiveType::LineStrips => 0, } } @@ -242,7 +214,7 @@ impl<'a> Primitive<'a> { None => return VertexIndices::none(), }; VertexIndices { - remaining: self.vertex_indices_size(1), + remaining: self.vertex_indices_size(), inner: self.vertex_indices_inner(offset), } } @@ -267,7 +239,7 @@ impl<'a> Primitive<'a> { None => return VertexIndices::none(), }; VertexIndices { - remaining: self.vertex_indices_size(1), + remaining: self.vertex_indices_size(), inner: self.vertex_indices_inner(offset), } } @@ -292,7 +264,32 @@ impl<'a> Primitive<'a> { None => return VertexIndices::none(), }; VertexIndices { - remaining: self.vertex_indices_size(1), + remaining: self.vertex_indices_size(), + inner: self.vertex_indices_inner(offset), + } + } + + pub(super) fn color_indices(&self) -> VertexIndices<'a> { + let offset = match &self.xml.input { + Some(input) => match &input.color { + Some(color) => color.offset, + None => { + if self.mesh.xml.mesh.vertices.id == input.vertex.source { + if self.mesh.xml.mesh.vertices.input.color.is_some() { + input.vertex.offset + } else { + return VertexIndices::none(); + } + } else { + // TODO: search other mesh's vertices + todo!() + } + } + }, + None => return VertexIndices::none(), + }; + VertexIndices { + remaining: self.vertex_indices_size(), inner: self.vertex_indices_inner(offset), } } @@ -322,10 +319,10 @@ impl Iterator for Positions<'_> { } impl ExactSizeIterator for Positions<'_> {} - impl FusedIterator for Positions<'_> {} pub(super) type Normals<'a> = Positions<'a>; +pub(super) type Colors<'a> = Positions<'a>; pub(super) struct Texcoords<'a>(Option>); @@ -356,7 +353,6 @@ impl Iterator for Texcoords<'_> { } impl ExactSizeIterator for Texcoords<'_> {} - impl FusedIterator for Texcoords<'_> {} pub(super) struct VertexIndices<'a> { @@ -385,18 +381,7 @@ enum IndicesInner<'a> { 
index: usize, range: Option>, }, - Lines { - offset: u32, - indices: slice::Chunks<'a, u32>, - }, - LineStrips { - offset: u32, - indices: &'a [u32], - stride: u32, - vcount: slice::Iter<'a, u32>, - index: usize, - range: Option>, - }, + Skip, None, } @@ -442,7 +427,7 @@ impl Iterator for VertexIndices<'_> { let x = *index + offset; let y = *index + offset + stride * k as usize; let z = *index + offset + stride * (k as usize + 1); - let value = Face::Triangle([indices[x], indices[y], indices[z]]); + let value = [indices[x], indices[y], indices[z]]; // NOTE: Do *not* increment index until range ends. return Some(value); } @@ -456,23 +441,11 @@ impl Iterator for VertexIndices<'_> { let x = *index + offset; let y = *index + offset + stride; let z = *index + offset + stride * 2; - let value = Face::Triangle([indices[x], indices[y], indices[z]]); - *index += stride * vc as usize; - Some(value) - } - 2 => { - let x = *index + offset; - let y = *index + offset + stride; - let value = Face::Line([indices[x], indices[y]]); - *index += stride * vc as usize; - Some(value) - } - 1 => { - let x = *index + offset; - let value = Face::Point([indices[x]]); + let value = [indices[x], indices[y], indices[z]]; *index += stride * vc as usize; Some(value) } + 1..=2 => self.next(), 0 => unreachable!(), _ => { let mut ri = 1..vc - 1; @@ -480,7 +453,7 @@ impl Iterator for VertexIndices<'_> { let x = *index + offset; let y = *index + offset + stride * k as usize; let z = *index + offset + stride * (k as usize + 1); - let value = Face::Triangle([indices[x], indices[y], indices[z]]); + let value = [indices[x], indices[y], indices[z]]; // Set range for next call. // NOTE: Do *not* increment index until range ends. *range = Some(ri); @@ -492,65 +465,13 @@ impl Iterator for VertexIndices<'_> { let indices1 = indices.next().unwrap(); let indices2 = indices.next().unwrap(); let indices3 = indices.next().unwrap(); - Some(Face::Triangle([ + Some([ indices1[*offset as usize], indices2[*offset as usize], indices3[*offset as usize], - ])) - } - IndicesInner::Lines { offset, indices } => { - let indices1 = indices.next().unwrap(); - let indices2 = indices.next().unwrap(); - Some(Face::Line([ - indices1[*offset as usize], - indices2[*offset as usize], - ])) - } - IndicesInner::LineStrips { - offset, - indices, - stride, - index, - vcount, - range, - } => { - let offset = *offset as usize; - let stride = *stride as usize; - if let Some(r) = range { - if let Some(k) = r.next() { - let x = *index + offset; - let y = *index + offset + stride * k as usize; - let value = Face::Line([indices[x], indices[y]]); - // NOTE: Do *not* increment index until range ends. - return Some(value); - } - let vc = r.end; - *index += stride * vc as usize; - *range = None; - } - let vc = *vcount.next()?; - match vc { - 2 => { - let x = *index + offset; - let y = *index + offset + stride; - let value = Face::Line([indices[x], indices[y]]); - *index += stride * vc as usize; - Some(value) - } - 0..=2 => unreachable!(), - _ => { - let mut r = 1..vc; - let k = r.next().unwrap(); - let x = *index + offset; - let y = *index + offset + stride * k as usize; - let value = Face::Line([indices[x], indices[y]]); - // Set range for next call. - // NOTE: Do *not* increment index until range ends. 
- *range = Some(r); - Some(value) - } - } + ]) } + IndicesInner::Skip => self.next(), IndicesInner::None => unreachable!(), } } @@ -561,12 +482,4 @@ impl Iterator for VertexIndices<'_> { } impl ExactSizeIterator for VertexIndices<'_> {} - impl FusedIterator for VertexIndices<'_> {} - -#[derive(Clone)] -pub(super) enum Face { - Point(#[allow(dead_code)] [u32; 1]), - Line(#[allow(dead_code)] [u32; 2]), - Triangle([u32; 3]), -} diff --git a/src/collada/material.rs b/src/collada/material.rs new file mode 100644 index 0000000..b362f2f --- /dev/null +++ b/src/collada/material.rs @@ -0,0 +1,108 @@ +use super::*; + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=279 +#[derive(Default)] +pub(super) struct LibraryMaterials<'a> { + // /// The unique identifier of this element. + // pub(super) id: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) materials: HashMap<&'a str, Material<'a>>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=280 +pub(super) struct Material<'a> { + /// The unique identifier of this element. + pub(super) id: &'a str, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) instance_effect: InstanceEffect<'a>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=271 +pub(super) struct InstanceEffect<'a> { + // /// The scoped identifier of this element. + // pub(super) sid: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + /// The URI of the location of the [`Effect`] to instantiate. + pub(super) url: Uri<'a, Effect<'a>>, +} + +// ============================================================================= +// Parsing + +pub(super) fn parse_library_materials<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, +) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "library_materials"); + // cx.library_materials.id = node.attribute("id"); + // cx.library_materials.name = node.attribute("name"); + + for node in node.element_children() { + match node.tag_name().name() { + "material" => { + let material = parse_material(node)?; + cx.library_materials.materials.insert(material.id, material); + } + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(node)), + } + } + + // The specification says has 1 or more elements, + // but some exporters write empty tags. + + Ok(()) +} + +fn parse_material<'a>(node: xml::Node<'a, '_>) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "material"); + // The specification say it is optional, but it is actually required. 
+ let id = node.required_attribute("id")?; + let mut instance_effect = None; + + for node in node.element_children() { + match node.tag_name().name() { + "instance_effect" => { + instance_effect = Some(parse_instance_effect(node)?); + } + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(node)), + } + } + + let instance_effect = match instance_effect { + Some(instance_effect) => instance_effect, + None => return Err(error::one_or_more_elems(node, "instance_effect")), + }; + + Ok(Material { + id, + // name: node.attribute("name"), + instance_effect, + }) +} + +fn parse_instance_effect<'a>(node: xml::Node<'a, '_>) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "instance_effect"); + let url = node.parse_url("url")?; + Ok(InstanceEffect { + // sid: node.attribute("sid"), + // name: node.attribute("name"), + url, + }) +} diff --git a/src/collada/mod.rs b/src/collada/mod.rs index 1a7e5d3..b448c51 100644 --- a/src/collada/mod.rs +++ b/src/collada/mod.rs @@ -4,41 +4,58 @@ #![allow(clippy::wildcard_imports)] // TODO +mod effect; mod geometry; +mod image; mod instance; mod iter; +mod material; +mod scene; use std::{ - cmp, collections::BTreeMap, collections::HashMap, fmt, io, marker::PhantomData, ops, str, - str::FromStr, + cmp, + collections::{BTreeMap, HashMap}, + fmt, io, + marker::PhantomData, + ops, + path::Path, + str::{self, FromStr}, }; -use self::geometry::*; +use self::{effect::*, geometry::*, image::*, material::*, scene::*}; use crate::{ + common, utils::{ + float, hex, utf16::decode_string, xml::{self, XmlNodeExt}, }, - Scene, + Color4, }; /// Parses meshes from bytes of COLLADA text. #[inline] -pub fn from_slice(bytes: &[u8]) -> io::Result { - let bytes = &decode_string(bytes)?; - from_str(bytes) +pub fn from_slice(bytes: &[u8]) -> io::Result { + from_slice_internal(bytes, None) } /// Parses meshes from a string of COLLADA text. #[inline] -pub fn from_str(s: &str) -> io::Result { +pub fn from_str(s: &str) -> io::Result { + from_str_internal(s, None) +} + +#[inline] +pub(crate) fn from_slice_internal(bytes: &[u8], path: Option<&Path>) -> io::Result { + let bytes = &decode_string(bytes)?; + from_str_internal(bytes, path) +} + +#[inline] +pub(crate) fn from_str_internal(s: &str, path: Option<&Path>) -> io::Result { let xml = xml::Document::parse(s).map_err(crate::error::invalid_data)?; let collada = Document::parse(&xml)?; - let meshes = instance::build_meshes(&collada); - let materials = (0..meshes.len()) - .map(|_| crate::Material::default()) - .collect(); // TODO - Ok(Scene { materials, meshes }) + Ok(instance::build(&collada, path.and_then(Path::parent))) } // Inspired by gltf-json's `Get` trait. @@ -51,51 +68,57 @@ trait Get { macro_rules! 
impl_get_by_uri { ($ty:ty, $($field:ident).*) => { - impl Get> for Document { + impl<'a> Get> for Document<'a> { type Target = $ty; - fn get(&self, index: &Uri<$ty>) -> Option<&Self::Target> { - self.$($field).*.get(&index.0) + fn get(&self, index: &Uri<'a, $ty>) -> Option<&Self::Target> { + self.$($field).*.get(&*index.0) } } }; } -impl_get_by_uri!(Accessor, library_geometries.accessors); -impl_get_by_uri!(ArrayData, library_geometries.array_data); -impl_get_by_uri!(Geometry, library_geometries.geometries); +impl_get_by_uri!(Accessor<'a>, library_geometries.accessors); +impl_get_by_uri!(ArrayData<'a>, library_geometries.array_data); +impl_get_by_uri!(Effect<'a>, library_effects.effects); +impl_get_by_uri!(Geometry<'a>, library_geometries.geometries); +impl_get_by_uri!(Image<'a>, library_images.images); +impl_get_by_uri!(Material<'a>, library_materials.materials); -struct Uri(String, PhantomData T>); +struct Uri<'a, T>(&'a str, PhantomData T>); -impl Uri { - fn parse(url: &str) -> io::Result { +impl<'a, T> Uri<'a, T> { + fn parse(url: &'a str) -> io::Result { // skipping the leading #, hopefully the remaining text is the accessor ID only - if let Some(url) = url.strip_prefix('#') { - Ok(Self(url.to_owned(), PhantomData)) + if let Some(id) = url.strip_prefix('#') { + Ok(Self(id, PhantomData)) } else { Err(format_err!("unknown reference format {:?}", url)) } } - fn cast(self) -> Uri { + fn from_id(id: &'a str) -> Self { + Self(id, PhantomData) + } + + fn cast(self) -> Uri<'a, U> { Uri(self.0, PhantomData) } - #[allow(dead_code)] // TODO(material) - fn as_str(&self) -> &str { - &self.0 + fn as_str(&self) -> &'a str { + self.0 } } -impl PartialEq for Uri { +impl PartialEq for Uri<'_, T> { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } -impl Eq for Uri {} +impl Eq for Uri<'_, T> {} -impl PartialEq for Uri +impl PartialEq for Uri<'_, T> where S: ?Sized + AsRef, { @@ -104,27 +127,27 @@ where } } -impl PartialEq> for str { +impl PartialEq> for str { #[inline] - fn eq(&self, other: &Uri) -> bool { + fn eq(&self, other: &Uri<'_, T>) -> bool { self == other.0 } } -impl PartialEq> for String { +impl PartialEq> for &str { #[inline] - fn eq(&self, other: &Uri) -> bool { + fn eq(&self, other: &Uri<'_, T>) -> bool { *self == other.0 } } trait ColladaXmlNodeExt<'a, 'input> { - fn parse_url(&self, name: &str) -> io::Result>; - fn parse_url_opt(&self, name: &str) -> io::Result>>; + fn parse_url(&self, name: &str) -> io::Result>; + fn parse_url_opt(&self, name: &str) -> io::Result>>; } impl<'a, 'input> ColladaXmlNodeExt<'a, 'input> for xml::Node<'a, 'input> { - fn parse_url(&self, name: &str) -> io::Result> { + fn parse_url(&self, name: &str) -> io::Result> { let url = self.required_attribute(name)?; Uri::parse(url).map_err(|e| { format_err!( @@ -137,7 +160,7 @@ impl<'a, 'input> ColladaXmlNodeExt<'a, 'input> for xml::Node<'a, 'input> { }) } - fn parse_url_opt(&self, name: &str) -> io::Result>> { + fn parse_url_opt(&self, name: &str) -> io::Result>> { if let Some(url) = self.attribute(name) { Uri::parse(url).map(Some).map_err(|e| { format_err!( @@ -161,7 +184,14 @@ struct Version { } impl Version { - const MIN: Self = Self { minor: 4, patch: 0 }; + const MIN: Self = Self::new(4, 0); + + const fn new(minor: u32, patch: u32) -> Self { + Self { minor, patch } + } + fn is_1_4(self) -> bool { + self >= Self::new(4, 0) && self < Self::new(5, 0) + } } impl FromStr for Version { @@ -176,7 +206,7 @@ impl FromStr for Version { } let minor = digits.next()?.parse().ok()?; let patch = 
digits.next()?.parse().ok()?; - Some(Self { minor, patch }) + Some(Self::new(minor, patch)) })() .ok_or_else(|| format_err!("unrecognized version format {:?}", s)) } @@ -188,17 +218,28 @@ impl fmt::Display for Version { } } -struct Context { - library_geometries: LibraryGeometries, +struct Context<'a> { + version: Version, + asset: Asset, + library_effects: LibraryEffects<'a>, + library_geometries: LibraryGeometries<'a>, + library_images: LibraryImages<'a>, + library_materials: LibraryMaterials<'a>, + library_visual_scenes: LibraryVisualScenes<'a>, } -struct Document { - library_geometries: LibraryGeometries, +struct Document<'a> { + asset: Asset, + library_effects: LibraryEffects<'a>, + library_geometries: LibraryGeometries<'a>, + library_images: LibraryImages<'a>, + library_materials: LibraryMaterials<'a>, + library_visual_scenes: LibraryVisualScenes<'a>, } -impl Document { +impl<'a> Document<'a> { /* - The `` element + The `` element. Attributes: - `version` (Required) @@ -232,7 +273,7 @@ impl Document { - `` (0 or 1) - `` (0 or more) */ - fn parse(doc: &xml::Document<'_>) -> io::Result { + fn parse(doc: &'a xml::Document<'_>) -> io::Result { let node = doc.root_element(); if node.tag_name().name() != "COLLADA" { bail!("root element is not "); @@ -245,14 +286,37 @@ impl Document { // debug!("collada schema version is {}", version); let mut cx = Context { + version, + asset: Asset { + unit: DEFAULT_UNIT_SIZE, + }, + library_effects: LibraryEffects::default(), library_geometries: LibraryGeometries::default(), + library_images: LibraryImages::default(), + library_materials: LibraryMaterials::default(), + library_visual_scenes: LibraryVisualScenes::default(), }; for node in node.element_children() { match node.tag_name().name() { + "library_effects" => { + parse_library_effects(&mut cx, node)?; + } "library_geometries" => { parse_library_geometries(&mut cx, node)?; } + "library_images" => { + parse_library_images(&mut cx, node)?; + } + "library_materials" => { + parse_library_materials(&mut cx, node)?; + } + "library_visual_scenes" => { + parse_library_visual_scenes(&mut cx, node)?; + } + "asset" => { + cx.asset = Asset::parse(node)?; + } _name => { // debug!("ignored <{}> element", name); } @@ -260,7 +324,12 @@ impl Document { } Ok(Self { + asset: cx.asset, + library_effects: cx.library_effects, library_geometries: cx.library_geometries, + library_images: cx.library_images, + library_materials: cx.library_materials, + library_visual_scenes: cx.library_visual_scenes, }) } @@ -272,7 +341,7 @@ impl Document { } } -impl ops::Index<&T> for Document +impl ops::Index<&T> for Document<'_> where Self: Get, { @@ -284,20 +353,59 @@ where } } -struct Source { +const DEFAULT_UNIT_SIZE: f32 = 1.; + +/// The `` element of the `` element. 
+struct Asset { + // + unit: f32, +} + +impl Asset { + fn parse(node: xml::Node<'_, '_>) -> io::Result { + debug_assert_eq!(node.tag_name().name(), "asset"); + + let mut unit = None; + for child in node.element_children() { + match child.tag_name().name() { + "unit" => { + if let Some(v) = child.attribute("meter") { + let v = xml::comma_to_period(v); + unit = Some(v.parse().map_err(|e| { + format_err!( + "{} in <{}> element at {}: {:?}", + e, + child.tag_name().name(), + child.attr_location("meter"), + v + ) + })?); + } + } + "up_axis" => {} // TODO + _ => { /* ignore */ } + } + } + + Ok(Self { + unit: unit.unwrap_or(DEFAULT_UNIT_SIZE), + }) + } +} + +struct Source<'a> { // Required - id: String, - // Optional - #[allow(dead_code)] - name: Option, + id: &'a str, + // // Optional + // name: Option<&'a str>, // 0 or 1 - array_element: Option, + array_element: Option>, // 0 or 1 - accessor: Option, + accessor: Option>, } -impl Source { +impl<'a> Source<'a> { /* The `` element (core) @@ -319,7 +427,7 @@ impl Source { - `` (0 or 1) - `` (core) (0 or more) */ - fn parse(node: xml::Node<'_, '_>) -> io::Result { + fn parse(node: xml::Node<'a, '_>) -> io::Result { debug_assert_eq!(node.tag_name().name(), "source"); let id = node.required_attribute("id")?; let mut array_element = None; @@ -353,31 +461,29 @@ impl Source { } Ok(Self { - id: id.into(), - name: node.attribute("name").map(Into::into), + id, + // name: node.attribute("name"), array_element, accessor, }) } } -struct ArrayElement { - // Required - id: String, +struct ArrayElement<'a> { // Required - #[allow(dead_code)] - count: u32, - - data: ArrayData, + id: &'a str, + // // Required + // count: u32, + data: ArrayData<'a>, } -fn parse_array_element(node: xml::Node<'_, '_>) -> io::Result { +fn parse_array_element<'a>(node: xml::Node<'a, '_>) -> io::Result> { let name = node.tag_name().name(); let is_string_array = name == "IDREF_array" || name == "Name_array"; let id = node.required_attribute("id")?; - let count = node.parse_required_attribute("count")?; - let mut content = xml::trim(node.text().unwrap_or_default()); + let count: u32 = node.parse_required_attribute("count")?; + let mut content = node.trimmed_text(); // some exporters write empty data arrays, but we need to conserve them anyways because others might reference them if content.is_empty() { @@ -387,8 +493,8 @@ fn parse_array_element(node: xml::Node<'_, '_>) -> io::Result { ArrayData::Float(vec![]) }; return Ok(ArrayElement { - id: id.into(), - count, + id, + // count, data, }); } @@ -413,21 +519,20 @@ fn parse_array_element(node: xml::Node<'_, '_>) -> io::Result { { n += 1; } - values.push(content[..n].into()); + values.push(&content[..n]); content = xml::trim_start(content.get(n..).unwrap_or_default()); } Ok(ArrayElement { - id: id.into(), - count, + id, + // count, data: ArrayData::String(values), }) } else { // TODO: check large count let mut values = Vec::with_capacity(count as _); - // TODO: https://stackoverflow.com/questions/4325363/converting-a-number-with-comma-as-decimal-point-to-float - let content = content.replace(',', "."); + let content = xml::comma_to_period(content); for res in xml::parse_float_array_exact(&content, count as _) { let value = res.map_err(|e| { format_err!( @@ -441,35 +546,35 @@ fn parse_array_element(node: xml::Node<'_, '_>) -> io::Result { } Ok(ArrayElement { - id: id.into(), - count, + id, + // count, data: ArrayData::Float(values), }) } } /// Data source array. 
-enum ArrayData { +enum ArrayData<'a> { /// Float(Vec), /// or - String(Vec), - // TODO(material) + String( + #[allow(dead_code)] // TODO + Vec<&'a str>, + ), // /// // Int(Vec), // /// // Bool(Vec), } -#[allow(dead_code)] // TODO(material) -impl ArrayData { - fn is_float(&self) -> bool { - matches!(self, Self::Float(..)) - } - - fn is_string(&self) -> bool { - matches!(self, Self::String(..)) - } +impl ArrayData<'_> { + // fn is_float(&self) -> bool { + // matches!(self, Self::Float(..)) + // } + // fn is_string(&self) -> bool { + // matches!(self, Self::String(..)) + // } fn as_float(&self) -> Option<&[f32]> { match self { @@ -477,46 +582,46 @@ impl ArrayData { Self::String(..) => None, } } - - fn as_string(&self) -> Option<&[String]> { - match self { - Self::Float(..) => None, - Self::String(v) => Some(v), - } - } - - fn len(&self) -> usize { - match self { - Self::Float(v) => v.len(), - Self::String(v) => v.len(), - } - } - - fn is_empty(&self) -> bool { - match self { - Self::Float(v) => v.is_empty(), - Self::String(v) => v.is_empty(), - } - } -} - -struct Accessor { + // fn as_string(&self) -> Option<&[&'a str]> { + // match self { + // Self::String(v) => Some(v), + // _ => None, + // } + // } + + // fn len(&self) -> usize { + // match self { + // Self::Float(v) => v.len(), + // Self::String(v) => v.len(), + // Self::Int(v) => v.len(), + // Self::Bool(v) => v.len(), + // } + // } + // fn is_empty(&self) -> bool { + // match self { + // Self::Float(v) => v.is_empty(), + // Self::String(v) => v.is_empty(), + // Self::Int(v) => v.is_empty(), + // Self::Bool(v) => v.is_empty(), + // } + // } +} + +struct Accessor<'a> { // Required count: u32, - // Optional - #[allow(dead_code)] // TODO(material) - offset: u32, + // // Optional + // offset: u32, // Required - source: Uri, + source: Uri<'a, ArrayData<'a>>, // Optional stride: u32, // 0 or more - #[allow(dead_code)] // TODO(material) - params: Vec, + params: Vec>, } -impl Accessor { +impl<'a> Accessor<'a> { /* The `` element @@ -529,12 +634,12 @@ impl Accessor { Child elements: - `` (0 or more) */ - fn parse(node: xml::Node<'_, '_>) -> io::Result { + fn parse(node: xml::Node<'a, '_>) -> io::Result { debug_assert_eq!(node.tag_name().name(), "accessor"); - let count = node.parse_required_attribute("count")?; + let count: u32 = node.parse_required_attribute("count")?; let source = node.parse_url("source")?; - let offset = node.parse_attribute("offset")?.unwrap_or(0); - let stride = node.parse_attribute("stride")?.unwrap_or(1); + let _offset: u32 = node.parse_attribute("offset")?.unwrap_or(0); + let stride: u32 = node.parse_attribute("stride")?.unwrap_or(1); let mut params = vec![]; for child in node.element_children() { @@ -548,7 +653,7 @@ impl Accessor { Ok(Self { count, - offset, + // offset, source, stride, params, @@ -556,54 +661,69 @@ impl Accessor { } } -#[allow(dead_code)] // TODO(material) -struct Param { - /// The name of this element. - name: Option, - /// The scoped identifier of this element. - sid: Option, +/// The `` element (data flow). +/// +/// See the specifications ([1.4], [1.5]) for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=125 +/// [1.5]: https://www.khronos.org/files/collada_spec_1_5.pdf#page=144 +struct Param<'a> { + // /// The name of this element. + // name: Option<&'a str>, + // /// The scoped identifier of this element. 
+ // sid: Option<&'a str>, // Required - ty: String, - // Optional - semantic: Option, + ty: &'a str, + // // Optional + // semantic: Option<&'a str>, } -impl Param { +impl<'a> Param<'a> { /* The `` element (data flow) - Attributes: - `name` (xs:token, Optional) - `sid` (sid_type, Optional) - `type` (xs:NMTOKEN, Required) - `semantic` (xs:NMTOKEN, Optional) + + Child elements: None */ - fn parse(node: xml::Node<'_, '_>) -> io::Result { + fn parse(node: xml::Node<'a, '_>) -> io::Result { let ty = node.required_attribute("type")?; - let name = node.attribute("name"); - let sid = node.attribute("sid"); - let semantic = node.attribute("semantic"); + // let name = node.attribute("name"); + // let sid = node.attribute("sid"); + // let semantic = node.attribute("semantic"); + if let Some(child) = node.element_children().next() { + return Err(error::unexpected_child_elem(child)); + } Ok(Self { - name: name.map(Into::into), - sid: sid.map(Into::into), - ty: ty.into(), - semantic: semantic.map(Into::into), + // name, + // sid, + ty, + // semantic, }) } } -struct SharedInput { +/// The `` element (shared). +/// +/// See the specifications ([1.4], [1.5]) for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=73 +/// [1.5]: https://www.khronos.org/files/collada_spec_1_5.pdf#page=87 +struct SharedInput<'a, T = Accessor<'a>> { // Required offset: u32, // Required semantic: InputSemantic, // Required - source: Uri, + source: Uri<'a, T>, // Optional set: u32, } -impl SharedInput { +impl<'a, T> SharedInput<'a, T> { /* The `` element (shared) @@ -612,13 +732,18 @@ impl SharedInput { - `semantic` (xs:NMTOKEN, Required) - `source` (uri_fragment_type, Required) - `set` (uint_type, Optional) + + Child elements: None */ - fn parse(node: xml::Node<'_, '_>) -> io::Result { + fn parse(node: xml::Node<'a, '_>) -> io::Result { debug_assert_eq!(node.tag_name().name(), "input"); let semantic = node.parse_required_attribute("semantic")?; let source = node.parse_url("source")?; - let offset = node.parse_required_attribute("offset")?; - let set = node.parse_attribute("set")?.unwrap_or(0); + let offset: u32 = node.parse_required_attribute("offset")?; + let set: u32 = node.parse_attribute("set")?.unwrap_or(0); + if let Some(child) = node.element_children().next() { + return Err(error::unexpected_child_elem(child)); + } Ok(Self { offset, semantic, @@ -627,7 +752,7 @@ impl SharedInput { }) } - fn cast(self) -> SharedInput { + fn cast(self) -> SharedInput<'a, U> { SharedInput { offset: self.offset, semantic: self.semantic, @@ -637,31 +762,46 @@ impl SharedInput { } } -struct UnsharedInput { +/// The `` element (unshared). +/// +/// See the specifications ([1.4], [1.5]) for details. 
+/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=76 +/// [1.5]: https://www.khronos.org/files/collada_spec_1_5.pdf#page=90 +struct UnsharedInput<'a> { // Required semantic: InputSemantic, // Required - source: Uri, + source: Uri<'a, Accessor<'a>>, } -impl UnsharedInput { +impl<'a> UnsharedInput<'a> { /* The `` element (unshared) Attributes: - `semantic` (xs:NMTOKEN, Required) - `source` (uri_fragment_type, Required) + + Child elements: None */ - fn parse(node: xml::Node<'_, '_>) -> io::Result { + fn parse(node: xml::Node<'a, '_>) -> io::Result { debug_assert_eq!(node.tag_name().name(), "input"); let semantic = node.parse_required_attribute("semantic")?; let source = node.parse_url("source")?; + if let Some(child) = node.element_children().next() { + return Err(error::unexpected_child_elem(child)); + } Ok(Self { semantic, source }) } } -// refs: https://www.khronos.org/files/collada_spec_1_4.pdf#page=74 -// refs: https://www.khronos.org/files/collada_spec_1_5.pdf#page=88 +/// The value of the `semantic` attribute in the `` element. +/// +/// See the specifications ([1.4], [1.5]) for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=74 +/// [1.5]: https://www.khronos.org/files/collada_spec_1_5.pdf#page=88 #[allow(non_camel_case_types, clippy::upper_case_acronyms)] #[derive(Clone, Copy, PartialEq, Eq, Hash)] enum InputSemantic { diff --git a/src/collada/scene.rs b/src/collada/scene.rs new file mode 100644 index 0000000..4f9f63e --- /dev/null +++ b/src/collada/scene.rs @@ -0,0 +1,418 @@ +use super::*; + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=102 +#[derive(Default)] +pub(super) struct LibraryVisualScenes<'a> { + // /// The unique identifier of this element. + // pub(super) id: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + pub(super) nodes: Vec>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=119 +#[derive(Default)] +pub(super) struct Node<'a> { + // /// The unique identifier of this element. + // pub(super) id: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + // /// The scoped identifier of this element. + // pub(super) sid: Option<&'a str>, + // /// The type of this element. + // pub(super) ty: NodeType, + + // pub(super) parent: Option, + // pub(super) children: Vec, + + // pub(super) transforms: Vec>, + // pub(super) instance_camera: Vec, + // pub(super) instance_controller: Vec, + pub(super) instance_geometry: Vec>, + // pub(super) instance_light: Vec, + // pub(super) instance_node: Vec, +} + +/// The type of the [`Node`]. +#[derive(Debug)] +pub(super) enum NodeType { + Joint, + Node, +} + +impl Default for NodeType { + fn default() -> Self { + Self::Node + } +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=85 +pub(super) struct InstanceGeometry<'a> { + // /// The scoped identifier of this element. + // pub(super) sid: Option<&'a str>, + // /// The name of this element. + // pub(super) name: Option<&'a str>, + /// The URI of the location of the [`Geometry`] to instantiate. + pub(super) url: Uri<'a, Geometry<'a>>, + + pub(super) materials: BTreeMap<&'a str, SemanticMappingTable<'a>>, +} + +/* +/// The `` element. 
+/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=82 +pub(super) struct InstanceController<'a> { + /// The scoped identifier of this element. + pub(super) sid: Option<&'a str>, + /// The name of this element. + pub(super) name: Option<&'a str>, + /// The URI of the location of the [`Controller`] to instantiate. + pub(super) url: Uri, + + pub(super) materials: IndexMap<&'a str, SemanticMappingTable>, +} +*/ + +pub(super) struct SemanticMappingTable<'a> { + // Required + pub(super) target: Uri<'a, Material<'a>>, + // Required + pub(super) symbol: &'a str, + // pub(super) map: HashMap<&'a str, InputSemanticMapEntry>, +} + +// pub(super) struct InputSemanticMapEntry { +// pub(super) input_semantic: InputSemantic, +// pub(super) input_set: u32, +// } + +/* +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=80 +#[derive(Debug)] +#[non_exhaustive] +pub struct InstanceCamera<'a> { + /// The scoped identifier of this element. + pub sid: Option<&'a str>, + /// The name of this element. + pub name: Option<&'a str>, + /// The URI of the location of the [`Camera`] to instantiate. + pub url: Uri>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=87 +#[derive(Debug)] +#[non_exhaustive] +pub struct InstanceLight<'a> { + /// The scoped identifier of this element. + pub sid: Option<&'a str>, + /// The name of this element. + pub name: Option<&'a str>, + /// The URI of the location of the [`Light`] to instantiate. + pub url: Uri>, +} + +/// The `` element. +/// +/// See the [specification][1.4] for details. +/// +/// [1.4]: https://www.khronos.org/files/collada_spec_1_4.pdf#page=89 +pub(super) struct InstanceNode<'a> { + /// The scoped identifier of this element. + pub(super) sid: Option<&'a str>, + /// The name of this element. + pub(super) name: Option<&'a str>, + /// The URI of the location of the [`Node`] to instantiate. + pub(super) url: Uri<'a, Node<'a>>, +} +*/ + +// ============================================================================= +// Parsing + +pub(super) fn parse_library_visual_scenes<'a>( + cx: &mut Context<'a>, + node: xml::Node<'a, '_>, +) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "library_visual_scenes"); + // cx.library_visual_scenes.id = node.attribute("id"); + // cx.library_visual_scenes.name = node.attribute("name"); + + for child in node.element_children() { + match child.tag_name().name() { + "visual_scene" => { + parse_visual_scene(child, &mut cx.library_visual_scenes.nodes)?; + } + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + // if visual_scenes.is_empty() { + // error::one_or_more_elems(node, "visual_scene")?; + // } + + Ok(()) +} + +fn parse_visual_scene<'a>(node: xml::Node<'a, '_>, nodes: &mut Vec>) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "visual_scene"); + // The specification say it is optional, but it is actually required. 
+ let _id = node.required_attribute("id")?; + let mut scene_nodes = vec![]; + let this = Node { + // id: Some(id), + // name: node.attribute("name"), + ..Default::default() + }; + let this_index = nodes.len(); + nodes.push(this); + + for child in node.element_children() { + match child.tag_name().name() { + "node" => { + scene_nodes.push(parse_node(child, nodes, this_index)?); + } + "evaluate_scene" => warn::unsupported_child_elem(child), + "asset" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + Ok(()) +} + +/* +The `` element + +Attributes: +- `id` (xs:ID, Optional) +- `name` (xs:token, Optional) +- `sid` (sid_type, Optional) +- `type` (Enumeration, Optional) + The type of the element. Valid values are JOINT or NODE. + The default is NODE. +- `layer` (list_of_names_type, Optional) + +Child elements must appear in the following order if present: +- `` (0 or 1) +- transformation_elements (0 or more ) + Any combination of the following transformation elements: + - `` + - `` + - `` + - `` + - `` + - `` +- `` (0 or more) +- `` (0 or more) +- `` (0 or more) +- `` (0 or more) +- `` (0 or more) +- `` (0 or more) +- `` (0 or more) +*/ +fn parse_node<'a>( + node: xml::Node<'a, '_>, + nodes: &mut Vec>, + _parent: usize, +) -> io::Result { + debug_assert_eq!(node.tag_name().name(), "node"); + let _ty: NodeType = node.parse_attribute("type")?.unwrap_or_default(); + let this = Node { + // id: node.attribute("id"), + // name: node.attribute("name"), + // sid: node.attribute("sid"), + // ty, + // parent: Some(parent), + ..Default::default() + }; + let this_index = nodes.len(); + nodes.push(this); + + for child in node.element_children() { + match child.tag_name().name() { + "node" => { + let _c = parse_node(child, nodes, this_index)?; + // nodes[this_index].children.push(c); + } + + // transformation + "lookat" => {} + "matrix" => {} + "rotate" => {} + "scale" => {} + "skew" => {} + "translate" => {} + + // instances + "instance_camera" => {} + "instance_controller" => {} + "instance_geometry" => { + nodes[this_index] + .instance_geometry + .push(parse_instance_geometry(child)?); + } + "instance_light" => {} + "instance_node" => {} + + _ => {} + } + } + // TODO + + Ok(this_index) +} + +impl FromStr for NodeType { + type Err = io::Error; + + fn from_str(s: &str) -> Result { + Ok(match s { + "NODE" => Self::Node, + "JOINT" => Self::Joint, + _ => bail!("unknown note type {:?}", s), + }) + } +} + +fn parse_instance_geometry<'a>(node: xml::Node<'a, '_>) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "instance_geometry"); + let url = node.parse_url("url")?; + let mut materials = BTreeMap::new(); + + for child in node.element_children() { + match child.tag_name().name() { + "bind_material" => { + parse_bind_material(child, &mut materials)?; + } + "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + Ok(InstanceGeometry { + // sid: node.attribute("sid"), + // name: node.attribute("name"), + url, + materials, + }) +} + +/* +The element + +Child elements must appear in the following order if present: +- `` (core) (0 or more) +- `` (1) +- `` (core) (0 or more) +- `` (0 or more) + +Child Elements for / +- `` (geometry) (1 or more) +*/ +fn parse_bind_material<'a>( + node: xml::Node<'a, '_>, + materials: &mut BTreeMap<&'a str, SemanticMappingTable<'a>>, +) -> io::Result<()> { + debug_assert_eq!(node.tag_name().name(), "bind_material"); + for child in node.element_children() { + match child.tag_name().name() { + 
"technique_common" => { + for instance_mat_node in child.element_children() { + match instance_mat_node.tag_name().name() { + "instance_material" => { + let table = parse_instance_material(instance_mat_node)?; + materials.insert(table.symbol, table); + } + _ => return Err(error::unexpected_child_elem(instance_mat_node)), + } + } + } + "param" | "technique" | "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + // TODO + } + Ok(()) +} + +/* +The element (geometry) + +Attributes: +- `sid` (sid_type, Optional) +- `name` (xs:token, Optional) +- `target` (xs:anyURI, Required) +- `symbol` (xs:NCName, Required) + +Child elements must appear in the following order if present: +- `` (FX) (0 or more) +- `` (0 or more) +- `` (0 or more) +*/ +fn parse_instance_material<'a>(node: xml::Node<'a, '_>) -> io::Result> { + debug_assert_eq!(node.tag_name().name(), "instance_material"); + let target = node.parse_url("target")?; + let symbol = node.required_attribute("symbol")?; + // let mut map = HashMap::new(); + + for child in node.element_children() { + match child.tag_name().name() { + "bind_vertex_input" => { + /* + The element + + Attributes: + - `semantic` (xs:NCName, Required) + - `input_semantic` (xs:NCName, Required) + - `input_set` (uint_type, Optional) + */ + + let _semantic = child.required_attribute("semantic")?; + let _input_semantic: InputSemantic = + child.parse_required_attribute("input_semantic")?; + let _input_set: u32 = child.parse_attribute("input_set")?.unwrap_or(0); + + // map.insert( + // semantic, + // InputSemanticMapEntry { + // input_semantic, + // input_set, + // }, + // ); + } + "bind" => warn::unsupported_child_elem(child), + "extra" => { /* skip */ } + _ => return Err(error::unexpected_child_elem(child)), + } + } + + Ok(SemanticMappingTable { + target, + symbol, + // map, + }) +} diff --git a/src/common.rs b/src/common.rs index 9c73dae..d16e12b 100644 --- a/src/common.rs +++ b/src/common.rs @@ -23,6 +23,7 @@ pub struct Scene { pub struct Mesh { pub name: String, pub vertices: Vec, + // TODO: use Vec3? 
pub texcoords: [Vec; MAX_NUMBER_OF_TEXCOORDS], pub normals: Vec, pub faces: Vec, @@ -102,26 +103,54 @@ impl fmt::Debug for Mesh { #[derive(Debug, Clone, Default)] #[non_exhaustive] pub struct Material { + // Refs: https://github.com/assimp/assimp/blob/v5.3.1/include/assimp/material.h#L944-L955 pub name: String, + pub shading_model: Option, + pub opacity: Option, + pub shininess: Option, + pub reflectivity: Option, + pub index_of_refraction: Option, + pub color: Colors, pub texture: Textures, } +// Refs: https://github.com/assimp/assimp/blob/v5.3.1/include/assimp/material.h#L956-L961 #[derive(Debug, Clone, Default)] #[non_exhaustive] pub struct Colors { - pub ambient: Option, pub diffuse: Option, + pub ambient: Option, pub specular: Option, pub emissive: Option, + pub transparent: Option, + pub reflective: Option, } +// Refs: https://github.com/assimp/assimp/blob/v5.3.1/include/assimp/material.h#L188 #[derive(Debug, Clone, Default)] #[non_exhaustive] pub struct Textures { - pub ambient: Option, pub diffuse: Option, pub specular: Option, + pub ambient: Option, pub emissive: Option, + pub height: Option, pub normal: Option, + pub shininess: Option, + pub opacity: Option, + pub displacement: Option, + pub lightmap: Option, + pub reflection: Option, +} + +// Refs: https://github.com/assimp/assimp/blob/v5.3.1/include/assimp/material.h#L355 +#[derive(Debug, Clone)] +#[non_exhaustive] +pub enum ShadingModel { + Flat, + Gouraud, + Phong, + Blinn, + NoShading, } diff --git a/src/loader.rs b/src/loader.rs index 115b71a..ce2dce7 100644 --- a/src/loader.rs +++ b/src/loader.rs @@ -175,9 +175,9 @@ impl> Loader { pub fn load_collada_from_slice>( &self, bytes: &[u8], - _path: P, + path: P, ) -> io::Result { - let scene = crate::collada::from_slice(bytes)?; + let scene = crate::collada::from_slice_internal(bytes, Some(path.as_ref()))?; Ok(self.post_process(scene)) } diff --git a/src/obj/mod.rs b/src/obj/mod.rs index bf51cda..0ab463f 100644 --- a/src/obj/mod.rs +++ b/src/obj/mod.rs @@ -19,7 +19,7 @@ use crate::{ float, int, utf16::decode_bytes, }, - Color4, Mesh, Scene, Vec2, Vec3, + Color4, Mesh, Scene, ShadingModel, Vec2, Vec3, }; /// Parses meshes from bytes of Wavefront OBJ text. 
@@ -827,22 +827,33 @@ fn read_mtl_internal( mtl_dir: Option<&Path>, ) -> Option { let mut p = texture?; - if p.is_empty() - || p.len() == 2 - && (p.starts_with(b".\\") || p.starts_with(b"./")) - { + if p.is_empty() { return None; } match mtl_dir { Some(mtl_dir) => { - p = p.strip_prefix(b".\\").unwrap_or(p); - p = p.strip_prefix(b"./").unwrap_or(p); + let tmp: Vec<_>; + if p.contains(&b'\\') { + tmp = p + .iter() + .map(|&b| if b == b'\\' { b'/' } else { b }) + .collect(); + p = &*tmp; + } + if p.starts_with(b"/..") { + p = p.strip_prefix(b"/").unwrap_or(p); + } let p = path_from_bytes(p).ok()?; - Some(mtl_dir.join(p)) + let p = mtl_dir.join(p); + if p.exists() { + Some(p) + } else { + None + } } None => { - let p = path_from_bytes(p).ok()?; - Some(p.to_owned()) + let p = path_from_bytes(p).ok()?.to_owned(); + Some(p) } } } @@ -850,18 +861,46 @@ fn read_mtl_internal( let material_index = materials.len() as u32; materials.push(common::Material { name: from_utf8_lossy(current_name).into_owned(), + // Refs: https://github.com/assimp/assimp/blob/v5.3.1/code/AssetLib/Obj/ObjFileImporter.cpp#L591 + shading_model: match mat.illumination_model { + Some(0) => Some(ShadingModel::NoShading), + Some(1) => Some(ShadingModel::Gouraud), + Some(2) => Some(ShadingModel::Phong), + _ => None, + }, + shininess: mat.shininess, + opacity: mat.alpha, + reflectivity: None, + index_of_refraction: mat.index_of_refraction, + // roughness_factor: mat.roughness, + // metallic_factor: mat.metallic, + // sheen_color_factor: mat.sheen, + // clearcoat_factor: mat.clearcoat_thickness, + // clearcoat_roughness_factor: mat.clearcoat_roughness, + // anisotropy_factor: mat.anisotropy, color: crate::Colors { ambient: color4(mat.ambient), diffuse: color4(mat.diffuse), specular: color4(mat.specular), emissive: color4(mat.emissive), + transparent: color4(mat.transparent), + reflective: None, }, texture: crate::Textures { - ambient: texture_path(mat.ambient_texture, mtl_dir), diffuse: texture_path(mat.diffuse_texture, mtl_dir), - specular: texture_path(mat.specular_texture, mtl_dir), + ambient: texture_path(mat.ambient_texture, mtl_dir), emissive: texture_path(mat.emissive_texture, mtl_dir), + specular: texture_path(mat.specular_texture, mtl_dir), + height: texture_path(mat.bump_texture, mtl_dir), normal: texture_path(mat.normal_texture, mtl_dir), + reflection: None, // TODO + displacement: texture_path( + mat.displacement_texture, + mtl_dir, + ), + opacity: texture_path(mat.opacity_texture, mtl_dir), + shininess: texture_path(mat.specularity_texture, mtl_dir), + lightmap: None, }, }); material_map.insert(current_name.to_owned(), material_index); @@ -1116,12 +1155,14 @@ struct Material<'a> { emissive_texture: Option<&'a [u8]>, bump_texture: Option<&'a [u8]>, normal_texture: Option<&'a [u8]>, + // reflection_texture: Option<&'a [u8]>, specularity_texture: Option<&'a [u8]>, opacity_texture: Option<&'a [u8]>, displacement_texture: Option<&'a [u8]>, roughness_texture: Option<&'a [u8]>, metallic_texture: Option<&'a [u8]>, sheen_texture: Option<&'a [u8]>, + // rma_texture: Option<&'a [u8]>, // Colors ambient: Option<[f32; 3]>, @@ -1140,6 +1181,7 @@ struct Material<'a> { clearcoat_thickness: Option, clearcoat_roughness: Option, anisotropy: Option, + // bump_multiplier: Option, } const __: u8 = 0; diff --git a/src/utils/hex.rs b/src/utils/hex.rs new file mode 100644 index 0000000..e44a0cb --- /dev/null +++ b/src/utils/hex.rs @@ -0,0 +1,194 @@ +// Based on https://github.com/KokaKiwi/rust-hex/pull/62, but with several additional 
optimizations. + +use std::{io, mem}; + +const __: u8 = u8::MAX; + +// Lookup table for ascii to hex decoding. +#[rustfmt::skip] +static DECODE_TABLE: [u8; 256] = [ + // 1 2 3 4 5 6 7 8 9 A B C D E F + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 0 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 1 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 2 + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, __, __, __, __, __, __, // 3 + __, 10, 11, 12, 13, 14, 15, __, __, __, __, __, __, __, __, __, // 4 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 5 + __, 10, 11, 12, 13, 14, 15, __, __, __, __, __, __, __, __, __, // 6 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 7 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 8 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // 9 + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // A + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // B + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // C + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // D + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // E + __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, __, // F +]; + +#[inline] +pub(crate) fn decode(bytes: &[u8]) -> io::Result> { + if bytes.len() % 2 != 0 { + bail!("invalid length {}", bytes.len()); + } + let mut out = vec![0; bytes.len() / 2]; + // Using hex2byte16 instead of hex2byte here increases throughput by 1.5x, + // but it also increases binary size. + // let hex2byte = hex2byte16; + let hex2byte = hex2byte; + decode_to_slice(bytes, &mut out, hex2byte)?; + Ok(out) +} + +#[inline] +fn decode_to_slice( + bytes: &[u8], + out: &mut [u8], + hex2byte: fn(&[u8], &mut u8) -> io::Result<()>, +) -> io::Result<()> { + const CHUNK_SIZE: usize = mem::size_of::(); + // First, process the data in usize units. This improves performance by + // reducing the number of writes to memory. + let mut bytes = bytes.chunks_exact(CHUNK_SIZE); + let mut out = out.chunks_exact_mut(CHUNK_SIZE / 2); + for (bytes, out) in bytes.by_ref().zip(out.by_ref()) { + let mut num = [0; CHUNK_SIZE / 2]; + for (bytes, num) in bytes.chunks_exact(2).zip(&mut num) { + hex2byte(bytes, num)?; + } + out.copy_from_slice(&num); + } + // Then process the remaining data. 
+ let bytes = bytes.remainder(); + let out = out.into_remainder(); + for (bytes, out) in bytes.chunks_exact(2).zip(out) { + hex2byte(bytes, out)?; + } + Ok(()) +} + +#[inline] +fn hex2byte(bytes: &[u8], out: &mut u8) -> io::Result<()> { + let upper = DECODE_TABLE[bytes[0] as usize]; + let lower = DECODE_TABLE[bytes[1] as usize]; + if upper == u8::MAX { + bail!("invalid hex character {}", bytes[0] as char); + } + if lower == u8::MAX { + bail!("invalid hex character {}", bytes[1] as char); + } + *out = (upper << 4) | lower; + Ok(()) +} + +#[cfg(test)] +static ENCODE_LOWER_TABLE: &[u8; 16] = b"0123456789abcdef"; +#[cfg(test)] +static ENCODE_UPPER_TABLE: &[u8; 16] = b"0123456789ABCDEF"; +#[cfg(test)] +#[inline] +const fn byte2hex(byte: u8, table: &[u8; 16]) -> [u8; 2] { + let upper = table[((byte & 0xf0) >> 4) as usize]; + let lower = table[(byte & 0x0f) as usize]; + [upper, lower] +} + +#[cfg(test)] +#[inline] +fn hex2byte16(bytes: &[u8], out: &mut u8) -> io::Result<()> { + static DECODE_TABLE: [u8; 65536] = { + let mut table = [__; 65536]; + let mut i = 0; + while i != u8::MAX { + let lower = u16::from_ne_bytes(byte2hex(i, ENCODE_LOWER_TABLE)); + let upper = u16::from_ne_bytes(byte2hex(i, ENCODE_UPPER_TABLE)); + table[lower as usize] = i; + table[upper as usize] = i; + i += 1; + } + table + }; + let n = u16::from_ne_bytes(bytes.try_into().unwrap()); + let num = DECODE_TABLE[n as usize]; + if num == u8::MAX { + bail!( + "invalid hex character {}{}", + bytes[0] as char, + bytes[1] as char + ); + } + *out = num; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn encode_naive(bytes: &[u8], table: &[u8; 16]) -> Vec { + let mut out = vec![0; bytes.len() * 2]; + for (&byte, out) in bytes.iter().zip(out.chunks_exact_mut(2)) { + out.copy_from_slice(&byte2hex(byte, table)); + } + out + } + fn decode_naive( + bytes: &[u8], + hex2byte: fn(&[u8], &mut u8) -> io::Result<()>, + ) -> io::Result> { + if bytes.len() % 2 != 0 { + bail!("invalid length {}", bytes.len()); + } + let mut out = vec![0; bytes.len() / 2]; + for (bytes, out) in bytes.chunks_exact(2).zip(&mut out) { + hex2byte(bytes, out)?; + } + Ok(out) + } + #[inline] + fn decode16(bytes: &[u8]) -> io::Result> { + if bytes.len() % 2 != 0 { + bail!("invalid length {}", bytes.len()); + } + let mut out = vec![0; bytes.len() / 2]; + decode_to_slice(bytes, &mut out, hex2byte16)?; + Ok(out) + } + + ::quickcheck::quickcheck! 
{ + fn decode_valid(x: String) -> bool { + if x.is_empty() { + return true; + } + let x = x.as_bytes(); + let hex_lower = encode_naive(x, ENCODE_LOWER_TABLE); + assert_eq!(decode(&hex_lower).unwrap(), x); + assert_eq!(decode16(&hex_lower).unwrap(), x); + assert_eq!(decode_naive(&hex_lower, hex2byte).unwrap(), x); + assert_eq!(decode_naive(&hex_lower, hex2byte16).unwrap(), x); + let hex_upper = encode_naive(x, ENCODE_UPPER_TABLE); + assert_eq!(decode(&hex_upper).unwrap(), x); + assert_eq!(decode16(&hex_lower).unwrap(), x); + assert_eq!(decode_naive(&hex_upper, hex2byte).unwrap(), x); + assert_eq!(decode_naive(&hex_upper, hex2byte16).unwrap(), x); + true + } + fn decode_invalid(x: String) -> bool { + if x.is_empty() { + return true; + } + let mut x = x.as_bytes(); + if x.len() < 2 { + return true; + } + if x.len() % 2 != 0 { + x = &x[..x.len() - 2]; + } + let res = decode(x).ok(); + assert_eq!(res, decode16(x).ok()); + assert_eq!(res, decode_naive(x, hex2byte).ok()); + assert_eq!(res, decode_naive(x, hex2byte16).ok()); + true + } + } +} diff --git a/src/utils/mod.rs b/src/utils/mod.rs index f215d28..d723401 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -1,6 +1,8 @@ pub(crate) mod bytes; #[cfg(any(feature = "collada", feature = "obj", feature = "stl"))] pub mod float; +#[cfg(feature = "collada")] +pub(crate) mod hex; #[cfg(any(feature = "collada", feature = "obj"))] pub mod int; #[cfg(feature = "collada")] diff --git a/src/utils/xml.rs b/src/utils/xml.rs index c6f68b2..45b58fc 100644 --- a/src/utils/xml.rs +++ b/src/utils/xml.rs @@ -1,6 +1,6 @@ // A module that provides utilities for parsing and visiting XML nodes. -use std::{fmt, io, iter, marker::PhantomData, str::FromStr}; +use std::{borrow::Cow, fmt, io, iter, marker::PhantomData, str::FromStr}; pub(crate) use roxmltree::*; @@ -23,6 +23,16 @@ pub(crate) fn trim_start(s: &str) -> &str { s.trim_start_matches(is_whitespace) } +// TODO: https://stackoverflow.com/questions/4325363/converting-a-number-with-comma-as-decimal-point-to-float +#[inline] +pub(crate) fn comma_to_period(s: &str) -> Cow<'_, str> { + if s.as_bytes().contains(&b',') { + s.replace(',', ".").into() + } else { + s.into() + } +} + // ----------------------------------------------------------------------------- // Parsing array @@ -32,7 +42,7 @@ where T: int::Integer, { ParseIntArray { - text: trim_start(text), + text, _marker: PhantomData, } } @@ -69,7 +79,7 @@ where T: float::Float, { ParseFloatArray { - text: trim_start(text), + text, _marker: PhantomData, } } @@ -106,7 +116,7 @@ where T: float::Float, { ParseFloatArrayExact { - text: trim_start(text), + text, num, count: 0, _marker: PhantomData, @@ -167,6 +177,7 @@ pub(crate) trait XmlNodeExt<'a, 'input> { where T: FromStr, T::Err: fmt::Display; + fn trimmed_text(&self) -> &'a str; fn node_location(&self) -> TextPos; fn attr_location(&self, name: &str) -> TextPos; } @@ -243,6 +254,10 @@ impl<'a, 'input> XmlNodeExt<'a, 'input> for Node<'a, 'input> { }) } + fn trimmed_text(&self) -> &'a str { + trim(self.text().unwrap_or_default()) + } + #[cold] fn node_location(&self) -> TextPos { let range = self.range(); diff --git a/tests/assimp.rs b/tests/assimp.rs index 44d27f3..b5ebf5d 100644 --- a/tests/assimp.rs +++ b/tests/assimp.rs @@ -2,12 +2,7 @@ clippy::match_same_arms, // https://github.com/rust-lang/rust-clippy/issues/12044 )] -use std::{ - collections::BTreeSet, - ffi::OsStr, - path::{Path, PathBuf}, - str, -}; +use std::{collections::BTreeSet, ffi::OsStr, path::Path, str}; use anyhow::Result; use duct::cmd; @@ 
-17,10 +12,10 @@ use walkdir::WalkDir; #[test] fn test() { let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let download_dir = &manifest_dir.join("tests/fixtures"); + let assimp_dir = &manifest_dir.join("tests/fixtures/assimp"); - clone(download_dir, "assimp/assimp", &["/test/models/"]).unwrap(); - let models = &download_dir.join("assimp/assimp/test/models"); + clone(assimp_dir, "assimp/assimp", &["/test/models/"]).unwrap(); + let models = &assimp_dir.join("test/models"); let mut collada_models = BTreeSet::new(); let mut obj_models = BTreeSet::new(); @@ -59,7 +54,7 @@ fn test() { for (i, m) in ml.meshes.iter().enumerate() { eprintln!("ml.meshes[{i}]={m:?}"); } - let ml = mesh_loader::Mesh::merge(ml.meshes); + let ml = &mesh_loader::Mesh::merge(ml.meshes); eprintln!("merge(ml.meshes)={ml:?}"); // assert_ne!(ml.vertices.len(), 0); assert_eq!(ml.vertices.len(), ml.faces.len() * 3); @@ -92,42 +87,14 @@ fn test() { _ => {} } let ai = assimp_importer.read_file(path.to_str().unwrap()).unwrap(); - let ai_vertices = ai - .mesh_iter() - .flat_map(|mesh| { - mesh.vertex_iter() - .map(|v| [v.x, v.y, v.z]) - .collect::>() - }) - .collect::>(); - let mut last = 0; - let ai_faces = ai - .mesh_iter() - .flat_map(|mesh| { - let f = mesh - .face_iter() - .filter_map(|f| { - if f.num_indices == 3 { - Some([f[0] + last, f[1] + last, f[2] + last]) - } else { - assert!(f.num_indices < 3, "should be triangulated"); - None - } - }) - .collect::>(); - if !f.is_empty() { - last = f.last().unwrap()[2] + 1; - } - f - }) - .collect::>(); + let ai = &merge_assimp_meshes(&ai); // TODO if !matches!( filename, "ConcavePolygon.dae" | "cameras.dae" | "lights.dae" | "teapot_instancenodes.DAE" ) { - assert_eq!(ml.faces.len(), ai_faces.len()); + assert_eq!(ml.faces.len(), ai.faces.len()); // TODO if !matches!( filename, @@ -143,9 +110,7 @@ fn test() { | "sphere.dae" | "teapots.DAE" ) { - for (ml, ai) in ml.faces.iter().copied().zip(ai_faces) { - assert_eq!(ml, ai); - } + assert_faces(ml, ai); } } // TODO @@ -165,7 +130,8 @@ fn test() { | "sphere.dae" | "teapot_instancenodes.DAE" ) { - assert_eq!(ml.vertices.len(), ai_vertices.len()); + assert_eq!(ml.vertices.len(), ai.vertices.len()); + assert_eq!(ml.normals.len(), ai.normals.len()); // TODO if !matches!( filename, @@ -178,32 +144,27 @@ fn test() { | "regr01.dae" | "teapots.DAE" ) { - let mut first = true; - let mut x = 1.; - for (j, (ml, ai)) in ml.vertices.iter().copied().zip(ai_vertices).enumerate() { - for i in 0..ml.len() { - let eps = f32::EPSILON * 1000.; - let (a, b) = (ml[i], ai[i]); - if first { - first = false; - if (a - b).abs() < eps { - continue; - } - // TODO - if (a - b * 100.).abs() < eps { - x = 100.; - continue; - } - } - assert!( - (a - b * x).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, real diff: `{:?}`) \ - at vertices[{j}][{i}]", - (a - b).abs() - ); - } - } + assert_vertices(ml, ai, f32::EPSILON * 1000.); + } + if !matches!( + filename, + "Cinema4D.dae" + | "cube_tristrips.dae" + | "earthCylindrical.DAE" + | "kwxport_test_vcolors.dae" + | "regr01.dae" + | "teapots.DAE" + ) { + assert_normals(ml, ai, f32::EPSILON * 10.); + } + if !matches!( + filename, + "Cinema4D.dae" | "earthCylindrical.DAE" | "regr01.dae" | "teapots.DAE" + ) { + assert_texcoords0(ml, ai, f32::EPSILON); + } + if !matches!(filename, "Cinema4D.dae" | "kwxport_test_vcolors.dae") { + assert_colors0(ml, ai, f32::EPSILON); } } } @@ -213,38 +174,42 @@ fn test() { eprintln!(); eprintln!("parsing {:?}", 
path.strip_prefix(manifest_dir).unwrap()); let filename = path.file_name().unwrap().to_str().unwrap(); - match filename { - // no mesh - "point_cloud.obj" - // no face - | "testline.obj" | "testpoints.obj" - => continue, - _ => {} - } // mesh-loader - match filename { - // number parsing issue - "number_formats.obj" - // TODO: should not be allowed - | "empty.obj" | "malformed2.obj" => continue, - _ => {} - } - if path.parent().unwrap().file_name().unwrap() == "invalid" { - let _e = mesh_loader.load(path).unwrap_err(); - let _e = assimp_importer - .read_file(path.to_str().unwrap()) - .map(drop) - .unwrap_err(); + if path.parent().unwrap().file_name().unwrap() == "invalid" + && !matches!(filename, "malformed2.obj") + || matches!(filename, "point_cloud.obj" | "number_formats.obj") + { + if matches!(filename, "point_cloud.obj" | "empty.obj") { + // TODO: should not be allowed + let _s = mesh_loader.load(path).unwrap(); + } else { + let _e = mesh_loader.load(path).unwrap_err(); + } + // TODO: assimp accepts number format that mesh-loader doesn't accept. + if matches!(filename, "number_formats.obj") + || matches!(filename, "point_cloud.obj") && option_env!("CI").is_some() + { + let _s = assimp_importer.read_file(path.to_str().unwrap()).unwrap(); + } else { + let _e = assimp_importer + .read_file(path.to_str().unwrap()) + .err() + .unwrap(); + } continue; } let ml = mesh_loader.load(path).unwrap(); for (i, m) in ml.meshes.iter().enumerate() { eprintln!("ml.meshes[{i}]={m:?}"); } - let ml = mesh_loader::Mesh::merge(ml.meshes); + let ml = &mesh_loader::Mesh::merge(ml.meshes); eprintln!("merge(ml.meshes)={ml:?}"); - assert_ne!(ml.vertices.len(), 0); + if matches!(filename, "testline.obj" | "testpoints.obj") { + assert_eq!(ml.vertices.len(), 0); + } else { + assert_ne!(ml.vertices.len(), 0); + } assert_eq!(ml.vertices.len(), ml.faces.len() * 3); if ml.normals.is_empty() { // assert_eq!(ml.normals.capacity(), 0); @@ -268,19 +233,7 @@ fn test() { // assimp match filename { - // segmentation fault... - "box.obj" - | "box_longline.obj" - | "box_mat_with_spaces.obj" - | "box_without_lineending.obj" - | "multiple_spaces.obj" - | "only_a_part_of_vertexcolors.obj" - | "regr_3429812.obj" - | "regr01.obj" - | "testmixed.obj" => continue, - // no mesh... - "box_UTF16BE.obj" => continue, - // less number of faces loaded... + // Less faces loaded only in CI... 
"cube_with_vertexcolors.obj" | "cube_with_vertexcolors_uni.obj" if option_env!("CI").is_some() => { @@ -289,105 +242,39 @@ fn test() { _ => {} } let ai = assimp_importer.read_file(path.to_str().unwrap()).unwrap(); - // assert_eq!(ai.num_meshes, 1); - // assert_eq!(ai.num_meshes, ai.num_materials); - let ai = ai.mesh(0).unwrap(); - // assert_eq!(ai.num_vertices, ai.num_faces * 3); - assert_eq!(ai.num_vertices as usize, ai.vertex_iter().count()); - assert_eq!(ai.num_vertices as usize, ai.normal_iter().count()); - if ai.has_texture_coords(0) { - assert_eq!(ai.num_vertices as usize, ai.texture_coords_iter(0).count()); - } - if ai.has_vertex_colors(0) { - assert_eq!(ai.num_vertices as usize, ai.vertex_color_iter(0).count()); - } - assert!(!ai.has_texture_coords(1)); + let ai = &merge_assimp_meshes(&ai); // TODO if !matches!( filename, - "concave_polygon.obj" | "space_in_material_name.obj" | "spider.obj" | "cube_usemtl.obj" - ) { - assert_eq!(ml.faces.len(), ai.num_faces as usize); - for (ml, ai) in ml - .faces - .iter() - .copied() - .zip(ai.face_iter().map(|f| [f[0], f[1], f[2]])) - { - assert_eq!(ml, ai); - } - } - if !matches!( - filename, - "concave_polygon.obj" | "space_in_material_name.obj" | "spider.obj" | "cube_usemtl.obj" + "box.obj" + | "box_UTF16BE.obj" + | "box_longline.obj" + | "box_mat_with_spaces.obj" + | "box_without_lineending.obj" + | "concave_polygon.obj" + | "cube_usemtl.obj" + | "multiple_spaces.obj" + | "only_a_part_of_vertexcolors.obj" + | "regr_3429812.obj" + | "regr01.obj" + | "space_in_material_name.obj" + | "spider.obj" + | "testmixed.obj" ) { - assert_eq!(ml.vertices.len(), ai.num_vertices as usize); - assert_eq!(ml.normals.len(), ai.num_vertices as usize); - if !matches!(filename, "cube_usemtl.obj") { - for (j, (ml, ai)) in ml - .vertices - .iter() - .copied() - .zip(ai.vertex_iter().map(|f| [f.x, f.y, f.z])) - .enumerate() - { - let eps = f32::EPSILON * 10.; - for i in 0..ml.len() { - let (a, b) = (ml[i], ai[i]); - assert!( - (a - b).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ - real diff: `{:?}`) at vertices[{j}][{i}]", - (a - b).abs() - ); - } - } - for (j, (ml, ai)) in ml - .normals - .iter() - .copied() - .zip(ai.normal_iter().map(|f| [f.x, f.y, f.z])) - .enumerate() - { - let eps = f32::EPSILON; - for i in 0..ml.len() { - let (a, b) = (ml[i], ai[i]); - assert!( - (a - b).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ - real diff: `{:?}`) at normals[{j}][{i}]", - (a - b).abs() - ); - } - } - } - if ai.has_vertex_colors(0) { - assert_eq!(ml.colors[0].len(), ai.num_vertices as usize); - for (j, (ml, ai)) in ml.colors[0] - .iter() - .copied() - .zip(ai.vertex_color_iter(0).map(|f| [f.r, f.g, f.b, f.a])) - .enumerate() - { - let eps = f32::EPSILON; - for i in 0..ml.len() { - let (a, b) = (ml[i], ai[i]); - assert!( - (a - b).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ - real diff: `{:?}`) at colors[0][{j}][{i}]", - (a - b).abs() - ); - assert!(a >= 0. 
&& a <= 100.); + assert_eq!(ml.faces.len(), ai.faces.len()); + if !matches!(filename, "malformed2.obj") { + assert_faces(ml, ai); + if !matches!(filename, "testline.obj" | "testpoints.obj") { + assert_eq!(ml.vertices.len(), ai.vertices.len()); + if !matches!(filename, "cube_usemtl.obj") { + assert_vertices(ml, ai, f32::EPSILON * 10.); } } - } else { - assert_eq!(ml.colors[0].len(), 0); } + assert_normals(ml, ai, f32::EPSILON); + assert_texcoords0(ml, ai, f32::EPSILON); + assert_colors0(ml, ai, f32::EPSILON); } } @@ -402,7 +289,7 @@ fn test() { for (i, m) in ml.meshes.iter().enumerate() { eprintln!("ml.meshes[{i}]={m:?}"); } - let ml = mesh_loader::Mesh::merge(ml.meshes); + let ml = &mesh_loader::Mesh::merge(ml.meshes); eprintln!("merge(ml.meshes)={ml:?}"); assert_ne!(ml.vertices.len(), 0); assert_eq!(ml.vertices.len(), ml.faces.len() * 3); @@ -431,93 +318,146 @@ fn test() { let ai = assimp_importer.read_file(path.to_str().unwrap()).unwrap(); assert_eq!(ai.num_meshes, 1); assert_eq!(ai.num_meshes, ai.num_materials); - let ai = ai.mesh(0).unwrap(); - assert_eq!(ai.num_vertices, ai.num_faces * 3); - assert_eq!(ai.num_vertices as usize, ai.vertex_iter().count()); - assert_eq!(ai.num_vertices as usize, ai.normal_iter().count()); - assert!(!ai.has_texture_coords(0)); - if ai.has_vertex_colors(0) { - assert_eq!(ai.num_vertices as usize, ai.vertex_color_iter(0).count()); + { + let ai = ai.mesh(0).unwrap(); + assert_eq!(ai.num_vertices, ai.num_faces * 3); + assert_eq!(ai.num_vertices as usize, ai.vertex_iter().count()); + assert_eq!(ai.num_vertices as usize, ai.normal_iter().count()); + assert!(!ai.has_texture_coords(0)); + if ai.has_vertex_colors(0) { + assert_eq!(ai.num_vertices as usize, ai.vertex_color_iter(0).count()); + } + assert!(!ai.has_texture_coords(1)); } - assert!(!ai.has_texture_coords(1)); + let ai = &merge_assimp_meshes(&ai); - assert_eq!(ml.faces.len(), ai.num_faces as usize); - for (ml, ai) in ml - .faces - .iter() - .copied() - .zip(ai.face_iter().map(|f| [f[0], f[1], f[2]])) - { - assert_eq!(ml, ai); + assert_faces(ml, ai); + assert_vertices(ml, ai, f32::EPSILON * 10.); + assert_normals(ml, ai, f32::EPSILON); + assert_texcoords0(ml, ai, f32::EPSILON); + assert_colors0(ml, ai, f32::EPSILON); + } +} + +fn merge_assimp_meshes(ai: &assimp::Scene<'_>) -> mesh_loader::Mesh { + println!( + "ai.num_meshes={},ai.num_materials={}", + ai.num_meshes, ai.num_materials + ); + let mut vertices = vec![]; + let mut texcoords0 = vec![]; + let mut normals = vec![]; + let mut faces = vec![]; + let mut colors0 = vec![]; + for mesh in ai.mesh_iter() { + #[allow(clippy::cast_possible_truncation)] + let last = vertices.len() as u32; + vertices.extend(mesh.vertex_iter().map(|v| [v.x, v.y, v.z])); + if mesh.has_texture_coords(0) { + texcoords0.extend(mesh.texture_coords_iter(0).map(|v| [v.x, v.y])); } - assert_eq!(ml.vertices.len(), ai.num_vertices as usize); - assert_eq!(ml.normals.len(), ai.num_vertices as usize); - for (j, (ml, ai)) in ml - .vertices - .iter() - .copied() - .zip(ai.vertex_iter().map(|f| [f.x, f.y, f.z])) - .enumerate() - { - let eps = f32::EPSILON * 10.; - for i in 0..ml.len() { - let (a, b) = (ml[i], ai[i]); - assert!( - (a - b).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, real diff: `{:?}`) \ - at vertices[{j}][{i}]", - (a - b).abs() - ); - } + // assimp-rs segfault without this null check. 
+ if !mesh.normals.is_null() { + normals.extend(mesh.normal_iter().map(|v| [v.x, v.y, v.z])); } - for (j, (ml, ai)) in ml - .normals - .iter() - .copied() - .zip(ai.normal_iter().map(|f| [f.x, f.y, f.z])) - .enumerate() - { - let eps = f32::EPSILON; - for i in 0..ml.len() { - let (a, b) = (ml[i], ai[i]); - assert!( - (a - b).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, real diff: `{:?}`) \ - at normals[{j}][{i}]", - (a - b).abs() - ); - } + if mesh.has_vertex_colors(0) { + colors0.extend(mesh.vertex_color_iter(0).map(|v| [v.r, v.g, v.b, v.a])); } - if ai.has_vertex_colors(0) { - assert_eq!(ml.colors[0].len(), ai.num_vertices as usize); - for (j, (ml, ai)) in ml.colors[0] - .iter() - .copied() - .zip(ai.vertex_color_iter(0).map(|f| [f.r, f.g, f.b, f.a])) - .enumerate() - { - let eps = f32::EPSILON; - for i in 0..ml.len() { - let (a, b) = (ml[i], ai[i]); - assert!( - (a - b).abs() < eps, - "assertion failed: `(left !== right)` \ - (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ - real diff: `{:?}`) at colors[0][{j}][{i}]", - (a - b).abs() - ); - assert!(a >= 0. && a <= 100.); - } + faces.extend(mesh.face_iter().filter_map(|f| { + if f.num_indices == 3 { + Some([f[0] + last, f[1] + last, f[2] + last]) + } else { + assert!(f.num_indices < 3, "should be triangulated"); + None } - } else { - assert_eq!(ml.colors[0].len(), 0); + })); + } + let mut mesh = mesh_loader::Mesh::default(); + mesh.vertices = vertices; + mesh.texcoords[0] = texcoords0; + mesh.normals = normals; + mesh.faces = faces; + mesh.colors[0] = colors0; + mesh +} + +#[track_caller] +fn assert_faces(ml: &mesh_loader::Mesh, ai: &mesh_loader::Mesh) { + assert_eq!(ml.faces.len(), ai.faces.len()); + for (i, (ml, ai)) in ml.faces.iter().zip(&ai.faces).enumerate() { + assert_eq!(ml, ai, "faces[{i}]"); + } +} +#[track_caller] +fn assert_vertices(ml: &mesh_loader::Mesh, ai: &mesh_loader::Mesh, eps: f32) { + assert_eq!(ml.vertices.len(), ai.vertices.len()); + for (i, (ml, ai)) in ml.vertices.iter().zip(&ai.vertices).enumerate() { + for j in 0..ml.len() { + let (a, b) = (ml[j], ai[j]); + assert!( + (a - b).abs() < eps, + "assertion failed: `(left !== right)` \ + (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ + real diff: `{:?}`) at vertices[{i}][{j}]", + (a - b).abs() + ); + } + } +} +#[track_caller] +fn assert_normals(ml: &mesh_loader::Mesh, ai: &mesh_loader::Mesh, eps: f32) { + assert_eq!(ml.normals.len(), ai.normals.len()); + for (i, (ml, ai)) in ml.normals.iter().zip(&ai.normals).enumerate() { + for j in 0..ml.len() { + let (a, b) = (ml[j], ai[j]); + assert!( + (a - b).abs() < eps, + "assertion failed: `(left !== right)` \ + (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ + real diff: `{:?}`) at normals[{i}][{j}]", + (a - b).abs() + ); + } + } +} +#[track_caller] +fn assert_texcoords0(ml: &mesh_loader::Mesh, ai: &mesh_loader::Mesh, eps: f32) { + assert_eq!(ml.texcoords[0].len(), ai.texcoords[0].len()); + for (i, (ml, ai)) in ml.texcoords[0].iter().zip(&ai.texcoords[0]).enumerate() { + for j in 0..ml.len() { + let (a, b) = (ml[j], ai[j]); + assert!( + (a - b).abs() < eps, + "assertion failed: `(left !== right)` \ + (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ + real diff: `{:?}`) at texcoords[0][{i}][{j}]", + (a - b).abs() + ); + } + } +} +#[track_caller] +fn assert_colors0(ml: &mesh_loader::Mesh, ai: &mesh_loader::Mesh, eps: f32) { + assert_eq!(ml.colors[0].len(), ai.colors[0].len()); + for (i, (ml, ai)) in 
ml.colors[0].iter().zip(&ai.colors[0]).enumerate() { + for j in 0..ml.len() { + let (a, b) = (ml[j], ai[j]); + assert!( + (a - b).abs() < eps, + "assertion failed: `(left !== right)` \ + (left: `{a:?}`, right: `{b:?}`, expect diff: `{eps:?}`, \ + real diff: `{:?}`) at colors[0][{i}][{j}]", + (a - b).abs() + ); + assert!(a >= 0. && a <= 100.); } } } -fn clone(download_dir: &Path, repository: &str, sparse_checkout: &[&str]) -> Result { +#[track_caller] +fn clone(src_dir: &Path, repository: &str, sparse_checkout: &[&str]) -> Result<()> { + assert!(!repository.is_empty()); + assert!(!sparse_checkout.is_empty()); let name = repository.strip_suffix(".git").unwrap_or(repository); assert!(!name.contains("://"), "{}", name); let repository = if repository.contains("://") { @@ -525,43 +465,36 @@ fn clone(download_dir: &Path, repository: &str, sparse_checkout: &[&str]) -> Res } else { format!("https://github.com/{repository}.git") }; - let src_dir = download_dir.join(name); if !src_dir.exists() { fs::create_dir_all(src_dir.parent().unwrap())?; - if sparse_checkout.is_empty() { - cmd!("git", "clone", "--depth", "1", repository, &src_dir).run()?; - } else { - cmd!( - "git", - "clone", - "--depth", - "1", - "--filter=tree:0", - "--no-checkout", - repository, - &src_dir, - ) - .run()?; - } - } - if !sparse_checkout.is_empty() { - cmd!("git", "sparse-checkout", "init").dir(&src_dir).run()?; - let mut out = String::from("/*\n!/*/\n"); // always download top-level files - out.push_str(&sparse_checkout.join("\n")); - fs::write(src_dir.join(".git/info/sparse-checkout"), out)?; - cmd!("git", "checkout") - .dir(&src_dir) - .stdout_capture() - .run()?; + cmd!( + "git", + "clone", + "--depth", + "1", + "--filter=tree:0", + "--no-checkout", + repository, + &src_dir, + ) + .run()?; } + cmd!("git", "sparse-checkout", "init").dir(src_dir).run()?; + let mut out = String::from("/*\n!/*/\n"); // always download top-level files + out.push_str(&sparse_checkout.join("\n")); + fs::write(src_dir.join(".git/info/sparse-checkout"), out)?; + cmd!("git", "checkout") + .dir(src_dir) + .stdout_capture() + .run()?; cmd!("git", "clean", "-df") - .dir(&src_dir) + .dir(src_dir) .stdout_capture() .run()?; // TODO: use stash? cmd!("git", "checkout", ".") - .dir(&src_dir) + .dir(src_dir) .stderr_capture() .run()?; - Ok(src_dir) + Ok(()) }
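
Below is a minimal usage sketch (not part of the patch itself) showing how the material data added by this change might be consumed from the crate's public `Scene`/`Material` types. It assumes the `collada` module and its `from_slice` function are reachable as `mesh_loader::collada::from_slice`, that meshes and materials line up by index as they did for the previously generated default materials, and that "model.dae" stands in for any COLLADA file; treat the exact paths and entry point as illustrative rather than authoritative.

use std::fs;

fn main() -> std::io::Result<()> {
    // Hypothetical input file; any COLLADA (.dae) document would do here.
    let bytes = fs::read("model.dae")?;
    // Assumes the collada module is exposed publicly as in src/collada/mod.rs above.
    let scene = mesh_loader::collada::from_slice(&bytes)?;
    // Walk meshes together with their materials (assumed to be index-aligned).
    for (mesh, material) in scene.meshes.iter().zip(&scene.materials) {
        println!("{}: {} faces", mesh.name, mesh.faces.len());
        if let Some(diffuse) = &material.color.diffuse {
            println!("  diffuse color: {diffuse:?}");
        }
        if let Some(path) = &material.texture.diffuse {
            println!("  diffuse texture: {}", path.display());
        }
    }
    Ok(())
}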