Vendor dependencies for 0.3.0 release

2025-09-27 10:29:08 -05:00
parent 0c8d39d483
commit 82ab7f317b
26803 changed files with 16134934 additions and 0 deletions

1
vendor/gltf/.cargo-checksum.json vendored Normal file

@@ -0,0 +1 @@
{"files":{"Cargo.lock":"8f97925261cab7b333ded16a2a4581780e582424265c70f8fb4ce5c2e853fea2","Cargo.toml":"3a6a9da1af33d8e46f219be78ce7cf78239c8b3129bfef9c7afb32bb91df4f01","LICENSE-APACHE":"3708458dee7f359ac6c9c5558023ed481be87e5372c43ebd7f2ca7ad23c12026","LICENSE-MIT":"0e6c282dfcb08a031c37b8f851fe5813fad63612e88a29dbe5121d727d67bbf2","examples/display/main.rs":"228d3179003c13e145dfa88a89c8f099ab8676f0a9d0a39855e8519a57670356","examples/export/main.rs":"70adda5950810c0dce0075cc2843a0518b803dfe53f34a253d4844d30b2c5c67","examples/roundtrip/main.rs":"c7be8701edcd9c8967c1deef4592a41512d458f638973865ce1e88d2f889b0ee","examples/tree/main.rs":"6db53ce5d7d3d36a76ccddfeffe3151c869b1cf92b13dbc0949d621f6a8acde3","src/accessor/mod.rs":"d067a00c0bd356a2983446bec454a8ccac0c857645f3192ce7205ae648f7a9bd","src/accessor/sparse.rs":"f4463d5aad4fcb385374894a3269cd4c2dd0d2853b2baee7f7d97653d60b65ad","src/accessor/util.rs":"6f26ae3aeed6baf0750bffc98f3c1644ea0259d1f0ae8234a535f44df3afc920","src/animation/iter.rs":"4040cf688f328e3cdbeef7154947dc5aeb1d495d87f0d2a0349d3b9653e4f776","src/animation/mod.rs":"eb106f24ffca78653e06650ffcae526a2770df497b99eab3062c54d2fc4df8ba","src/animation/util/mod.rs":"f0ec49b85d043142c6fab747fcef6b3c66d09535907939c68ec8049495bdff39","src/animation/util/morph_target_weights.rs":"73df55339327b414bc7b58d9eed2cd8b2236096e5f2f1e2b02493fcbab41e1c0","src/animation/util/rotations.rs":"7ed7a58f14df817647159a895acf9203805e5fed3769a73bc957ca4e759d54eb","src/binary.rs":"a35feec2207d4432df9af9b8b017544e610522c08b579e1b781be4946365b3c5","src/buffer.rs":"b3bfc801dc64a426c3a1b63ae6d01c4556b59b7c3e119a686b8378b02d07231a","src/camera.rs":"d71026aab2de5ec99bddc48f3ce7c434c2fa24b997b64553656c9fffa12d09a3","src/image.rs":"3cd652546d8e708a72c0489e30c70ed36ea9337a3b7ffd2ee5d1816c84b4db50","src/import.rs":"47ad672ae06a8a5d8d15e50460ae546b99f0b2ba6107f691ba84f1ecbaefbd8c","src/iter.rs":"3994853364bd997d6a3d9bda4b1435f5a021129aae64e76fd7595e1b0629229c","src/khr_lights_punctual.rs":"f16a1c8b252486f353efcf8db68649012cb631f5e7c0e14db6fdcd4effdacd16","src/khr_materials_variants.rs":"46050a2a1a9134a73c93f1ecd41a86948a4f059a174ec6ee9ca831e71252bac4","src/lib.rs":"18f7f12e5408405a53ceb0ee7aeb2bc4a3b30c0fd373f67d2f1de1cd66c02403","src/material.rs":"8fe20e232a20ccd2605105d552198d41ff01b64a6c42fa1d048621248dd1cf58","src/math.rs":"1c53af4f9a06ea49767f3826066282b0d41f51080d53872fa8c7871249820938","src/mesh/iter.rs":"49568e99d939785eaf63ba0f6f4fc53b5036682c22932920eec9b44faa9aff92","src/mesh/mod.rs":"08c23941060902ab80238cbcad36484cca85bbf9f5e75520b39c515004278ba1","src/mesh/util/colors.rs":"8b80988fbb5b0aba8f4bb7f6ab6d5f51588f3ec941cb71ea9d632fb11eafea81","src/mesh/util/indices.rs":"b6d0d40aabbb2e8ebd2ba415ac03b880f88ea8f3256aa69744822f035a003b34","src/mesh/util/joints.rs":"faf4a7a3616a6b050573413fcf837d509ede296e124dd1d251c6ef3d79af6609","src/mesh/util/mod.rs":"39898889dbb7e565c997018597495844c09c26fb34013bd190723ac91147a023","src/mesh/util/tex_coords.rs":"054333161b89b358716cbcbde8a88820ea990861273146daffa5112d446cb116","src/mesh/util/weights.rs":"f1fda5147afd5139eb6abf02a5b2f9358b0befa61d680c8ffb01034a58101697","src/scene/iter.rs":"d2fbba4273f4468b58b35e1c9ad67d80857dbcff83cbd75c87553b34170293e6","src/scene/mod.rs":"2d646a22c85f1a478dd27b82fd2173c9ec987644ee1c6b29ec47e5be121d44ed","src/skin/iter.rs":"ed5bfe84589fcafe06641ee05c74c16fa02c39b3256a2f4a90ad82297ac5a349","src/skin/mod.rs":"85909bb827c28b9148e79d4d2c74e3811cdfe1bf3f016c6709f024f0d4bf6ba8","src/skin/util.rs":"167d184ae21694fff4368c3f5b15d626a9c82ef78bd3f7227eabc8
e72d7c77ca","src/texture.rs":"2620bb9f42a130388605dd70570dfe82eaf2ddf09597a258090a3fb688f77eea","tests/import_sample_models.rs":"909871db5a6bf97c4071848ff6703f0a9c972c0137e8b046b3f6d8936ba79125","tests/roundtrip_binary_gltf.rs":"cda55659f27211c45509720f7f4074f4cc0387e1c3d4b96df16361af5ce4dad4","tests/test_wrapper.rs":"2723e981cf7287e8b455ea3ad7d02f61eb13d32764d5f6fcd783ef968ccdacda"},"package":"e3ce1918195723ce6ac74e80542c5a96a40c2b26162c1957a5cd70799b8cacf7"}

294
vendor/gltf/Cargo.lock generated vendored Normal file

@@ -0,0 +1,294 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
#[cfg_attr(feature = "extras", serde(skip_serializing_if = "Extras::is_empty"))]This file is automatically @generated by Cargo.
#[cfg_attr(feature = "extras", serde(skip_serializing_if = "Extras::is_empty"))]It is not intended for manual editing.
version = 3
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "approx"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6"
dependencies = [
"num-traits",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bytemuck"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
[[package]]
name = "byteorder"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "crc32fast"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
dependencies = [
"cfg-if",
]
[[package]]
name = "fdeflate"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d329bdeac514ee06249dabc27877490f17f5d371ec693360768b838e19f3ae10"
dependencies = [
"simd-adler32",
]
[[package]]
name = "flate2"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
dependencies = [
"crc32fast",
"miniz_oxide 0.6.2",
]
[[package]]
name = "gltf"
version = "1.4.1"
dependencies = [
"approx",
"base64",
"byteorder",
"gltf-json",
"image",
"lazy_static",
"serde_json",
"urlencoding",
]
[[package]]
name = "gltf-derive"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14070e711538afba5d6c807edb74bcb84e5dbb9211a3bf5dea0dfab5b24f4c51"
dependencies = [
"inflections",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "gltf-json"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6176f9d60a7eab0a877e8e96548605dedbde9190a7ae1e80bbcc1c9af03ab14"
dependencies = [
"gltf-derive",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "image"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd54d660e773627692c524beaad361aca785a4f9f5730ce91f42aabe5bce3d11"
dependencies = [
"bytemuck",
"byteorder",
"num-traits",
"png",
"zune-core",
"zune-jpeg",
]
[[package]]
name = "inflections"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a257582fdcde896fd96463bf2d40eefea0580021c0712a0e2b028b60b47a837a"
[[package]]
name = "itoa"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "miniz_oxide"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
dependencies = [
"adler",
]
[[package]]
name = "miniz_oxide"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
"simd-adler32",
]
[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
dependencies = [
"autocfg",
]
[[package]]
name = "png"
version = "0.17.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aaeebc51f9e7d2c150d3f3bfeb667f2aa985db5ef1e3d212847bdedb488beeaa"
dependencies = [
"bitflags",
"crc32fast",
"fdeflate",
"flate2",
"miniz_oxide 0.7.1",
]
[[package]]
name = "proc-macro2"
version = "1.0.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2de98502f212cfcea8d0bb305bd0f49d7ebdd75b64ba0a68f937d888f4e0d6db"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
[[package]]
name = "serde"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.196"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "simd-adler32"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "238abfbb77c1915110ad968465608b68e869e0772622c9656714e73e5a1a522f"
[[package]]
name = "syn"
version = "2.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89456b690ff72fddcecf231caedbe615c59480c93358a93dfae7fc29e3ebbf0e"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
[[package]]
name = "urlencoding"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9"
[[package]]
name = "zune-core"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a"
[[package]]
name = "zune-jpeg"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec866b44a2a1fd6133d363f073ca1b179f438f99e7e5bfb1e33f7181facfe448"
dependencies = [
"zune-core",
]

122
vendor/gltf/Cargo.toml vendored Normal file

@@ -0,0 +1,122 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.61"
name = "gltf"
version = "1.4.1"
authors = ["David Harvey-Macaulay <alteous@outlook.com>"]
include = [
"**/*.rs",
"Cargo.toml",
"LICENSE-*",
]
description = "glTF 2.0 loader"
homepage = "https://github.com/gltf-rs/gltf"
documentation = "https://docs.rs/gltf"
readme = "README.md"
keywords = [
"glTF",
"3D",
"asset",
"model",
"scene",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/gltf-rs/gltf"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = [
"--cfg",
"docsrs",
]
[[example]]
name = "gltf-display"
path = "examples/display/main.rs"
[[example]]
name = "gltf-export"
path = "examples/export/main.rs"
[[example]]
name = "gltf-roundtrip"
path = "examples/roundtrip/main.rs"
[[example]]
name = "gltf-tree"
path = "examples/tree/main.rs"
[dependencies.base64]
version = "0.13"
optional = true
[dependencies.byteorder]
version = "1.3"
[dependencies.gltf-json]
version = "=1.4.1"
[dependencies.image]
version = "0.25"
features = [
"jpeg",
"png",
]
optional = true
default-features = false
[dependencies.lazy_static]
version = "1"
[dependencies.serde_json]
version = "1.0"
features = ["raw_value"]
[dependencies.urlencoding]
version = "2.1"
optional = true
[dev-dependencies.approx]
version = "0.5"
[features]
KHR_lights_punctual = ["gltf-json/KHR_lights_punctual"]
KHR_materials_emissive_strength = ["gltf-json/KHR_materials_emissive_strength"]
KHR_materials_ior = ["gltf-json/KHR_materials_ior"]
KHR_materials_pbrSpecularGlossiness = ["gltf-json/KHR_materials_pbrSpecularGlossiness"]
KHR_materials_specular = ["gltf-json/KHR_materials_specular"]
KHR_materials_transmission = ["gltf-json/KHR_materials_transmission"]
KHR_materials_unlit = ["gltf-json/KHR_materials_unlit"]
KHR_materials_variants = ["gltf-json/KHR_materials_variants"]
KHR_materials_volume = ["gltf-json/KHR_materials_volume"]
KHR_texture_transform = ["gltf-json/KHR_texture_transform"]
allow_empty_texture = ["gltf-json/allow_empty_texture"]
default = [
"import",
"utils",
"names",
]
extensions = ["gltf-json/extensions"]
extras = ["gltf-json/extras"]
guess_mime_type = []
import = [
"base64",
"image",
"urlencoding",
]
names = ["gltf-json/names"]
utils = []
[badges.travis-ci]
repository = "gltf-rs/gltf"

202
vendor/gltf/LICENSE-APACHE vendored Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

26
vendor/gltf/LICENSE-MIT vendored Normal file

@@ -0,0 +1,26 @@
Copyright (c) 2017 The gltf Library Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

20
vendor/gltf/examples/display/main.rs vendored Normal file

@@ -0,0 +1,20 @@
use std::{fs, io};
use std::boxed::Box;
use std::error::Error as StdError;
fn run(path: &str) -> Result<(), Box<dyn StdError>> {
let file = fs::File::open(path)?;
let reader = io::BufReader::new(file);
let gltf = gltf::Gltf::from_reader(reader)?;
println!("{:#?}", gltf);
Ok(())
}
fn main() {
if let Some(path) = std::env::args().nth(1) {
run(&path).expect("runtime error");
} else {
println!("usage: gltf-display <FILE>");
}
}
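
A small alternative sketch, assuming the same crate API: gltf::Gltf::open (used in the crate's own doc examples further down) collapses the explicit File/BufReader steps above into one call; the helper name is invented for illustration.

fn open_shorthand(path: &str) -> Result<gltf::Gltf, Box<dyn std::error::Error>> {
    // Open and parse in a single call; `?` converts gltf::Error for us.
    Ok(gltf::Gltf::open(path)?)
}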

203
vendor/gltf/examples/export/main.rs vendored Normal file

@@ -0,0 +1,203 @@
use gltf_json as json;
use std::{fs, mem};
use json::validation::Checked::Valid;
use json::validation::USize64;
use std::borrow::Cow;
use std::io::Write;
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
enum Output {
/// Output standard glTF.
Standard,
/// Output binary glTF.
Binary,
}
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct Vertex {
position: [f32; 3],
color: [f32; 3],
}
/// Calculate bounding coordinates of a list of vertices, used for the clipping distance of the model
fn bounding_coords(points: &[Vertex]) -> ([f32; 3], [f32; 3]) {
let mut min = [f32::MAX, f32::MAX, f32::MAX];
let mut max = [f32::MIN, f32::MIN, f32::MIN];
for point in points {
let p = point.position;
for i in 0..3 {
min[i] = f32::min(min[i], p[i]);
max[i] = f32::max(max[i], p[i]);
}
}
(min, max)
}
fn align_to_multiple_of_four(n: &mut usize) {
*n = (*n + 3) & !3;
}
fn to_padded_byte_vector<T>(vec: Vec<T>) -> Vec<u8> {
let byte_length = vec.len() * mem::size_of::<T>();
let byte_capacity = vec.capacity() * mem::size_of::<T>();
let alloc = vec.into_boxed_slice();
let ptr = Box::<[T]>::into_raw(alloc) as *mut u8;
let mut new_vec = unsafe { Vec::from_raw_parts(ptr, byte_length, byte_capacity) };
while new_vec.len() % 4 != 0 {
new_vec.push(0); // pad to multiple of four bytes
}
new_vec
}
fn export(output: Output) {
let triangle_vertices = vec![
Vertex {
position: [0.0, 0.5, 0.0],
color: [1.0, 0.0, 0.0],
},
Vertex {
position: [-0.5, -0.5, 0.0],
color: [0.0, 1.0, 0.0],
},
Vertex {
position: [0.5, -0.5, 0.0],
color: [0.0, 0.0, 1.0],
},
];
let (min, max) = bounding_coords(&triangle_vertices);
let mut root = gltf_json::Root::default();
let buffer_length = triangle_vertices.len() * mem::size_of::<Vertex>();
let buffer = root.push(json::Buffer {
byte_length: USize64::from(buffer_length),
extensions: Default::default(),
extras: Default::default(),
name: None,
uri: if output == Output::Standard {
Some("buffer0.bin".into())
} else {
None
},
});
let buffer_view = root.push(json::buffer::View {
buffer,
byte_length: USize64::from(buffer_length),
byte_offset: None,
byte_stride: Some(json::buffer::Stride(mem::size_of::<Vertex>())),
extensions: Default::default(),
extras: Default::default(),
name: None,
target: Some(Valid(json::buffer::Target::ArrayBuffer)),
});
let positions = root.push(json::Accessor {
buffer_view: Some(buffer_view),
byte_offset: Some(USize64(0)),
count: USize64::from(triangle_vertices.len()),
component_type: Valid(json::accessor::GenericComponentType(
json::accessor::ComponentType::F32,
)),
extensions: Default::default(),
extras: Default::default(),
type_: Valid(json::accessor::Type::Vec3),
min: Some(json::Value::from(Vec::from(min))),
max: Some(json::Value::from(Vec::from(max))),
name: None,
normalized: false,
sparse: None,
});
let colors = root.push(json::Accessor {
buffer_view: Some(buffer_view),
byte_offset: Some(USize64::from(3 * mem::size_of::<f32>())),
count: USize64::from(triangle_vertices.len()),
component_type: Valid(json::accessor::GenericComponentType(
json::accessor::ComponentType::F32,
)),
extensions: Default::default(),
extras: Default::default(),
type_: Valid(json::accessor::Type::Vec3),
min: None,
max: None,
name: None,
normalized: false,
sparse: None,
});
let primitive = json::mesh::Primitive {
attributes: {
let mut map = std::collections::BTreeMap::new();
map.insert(Valid(json::mesh::Semantic::Positions), positions);
map.insert(Valid(json::mesh::Semantic::Colors(0)), colors);
map
},
extensions: Default::default(),
extras: Default::default(),
indices: None,
material: None,
mode: Valid(json::mesh::Mode::Triangles),
targets: None,
};
let mesh = root.push(json::Mesh {
extensions: Default::default(),
extras: Default::default(),
name: None,
primitives: vec![primitive],
weights: None,
});
let node = root.push(json::Node {
mesh: Some(mesh),
..Default::default()
});
root.push(json::Scene {
extensions: Default::default(),
extras: Default::default(),
name: None,
nodes: vec![node],
});
match output {
Output::Standard => {
let _ = fs::create_dir("triangle");
let writer = fs::File::create("triangle/triangle.gltf").expect("I/O error");
json::serialize::to_writer_pretty(writer, &root).expect("Serialization error");
let bin = to_padded_byte_vector(triangle_vertices);
let mut writer = fs::File::create("triangle/buffer0.bin").expect("I/O error");
writer.write_all(&bin).expect("I/O error");
}
Output::Binary => {
let json_string = json::serialize::to_string(&root).expect("Serialization error");
let mut json_offset = json_string.len();
align_to_multiple_of_four(&mut json_offset);
let glb = gltf::binary::Glb {
header: gltf::binary::Header {
magic: *b"glTF",
version: 2,
// N.B., the size of binary glTF file is limited to range of `u32`.
length: (json_offset + buffer_length)
.try_into()
.expect("file size exceeds binary glTF limit"),
},
bin: Some(Cow::Owned(to_padded_byte_vector(triangle_vertices))),
json: Cow::Owned(json_string.into_bytes()),
};
let writer = std::fs::File::create("triangle.glb").expect("I/O error");
glb.to_writer(writer).expect("glTF binary output error");
}
}
}
fn main() {
export(Output::Standard);
export(Output::Binary);
}
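
A worked sketch of the rounding performed by align_to_multiple_of_four above: adding 3 and then clearing the two low bits rounds any length up to the next multiple of four, matching the 4-byte chunk alignment that binary glTF expects; the numbers are illustrative and the snippet is not part of the vendored example.

fn main() {
    let align = |n: usize| (n + 3) & !3;
    assert_eq!(align(0), 0); // 0 + 3 = 3; clearing the low two bits gives 0
    assert_eq!(align(5), 8); // 5 + 3 = 8, already a multiple of four
    assert_eq!(align(8), 8); // aligned lengths are unchanged: 11 & !3 = 8
}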

21
vendor/gltf/examples/roundtrip/main.rs vendored Normal file

@@ -0,0 +1,21 @@
use std::{fs, io};
use std::boxed::Box;
use std::error::Error as StdError;
fn run(path: &str) -> Result<(), Box<dyn StdError>> {
let file = fs::File::open(path)?;
let reader = io::BufReader::new(file);
let gltf = gltf::Gltf::from_reader(reader)?;
let json = gltf.document.into_json().to_string_pretty()?;
println!("{}", json);
Ok(())
}
fn main() {
if let Some(path) = std::env::args().nth(1) {
run(&path).expect("runtime error");
} else {
println!("usage: gltf-roundtrip <FILE>");
}
}

42
vendor/gltf/examples/tree/main.rs vendored Normal file

@@ -0,0 +1,42 @@
use std::boxed::Box;
use std::error::Error as StdError;
use std::{fs, io};
fn print_tree(node: &gltf::Node, depth: i32) {
for _ in 0..(depth - 1) {
print!(" ");
}
print!(" -");
print!(" Node {}", node.index());
#[cfg(feature = "names")]
print!(" ({})", node.name().unwrap_or("<Unnamed>"));
println!();
for child in node.children() {
print_tree(&child, depth + 1);
}
}
fn run(path: &str) -> Result<(), Box<dyn StdError>> {
let file = fs::File::open(path)?;
let reader = io::BufReader::new(file);
let gltf = gltf::Gltf::from_reader(reader)?;
for scene in gltf.scenes() {
print!("Scene {}", scene.index());
#[cfg(feature = "names")]
print!(" ({})", scene.name().unwrap_or("<Unnamed>"));
println!();
for node in scene.nodes() {
print_tree(&node, 1);
}
}
Ok(())
}
fn main() {
if let Some(path) = std::env::args().nth(1) {
run(&path).expect("runtime error");
} else {
println!("usage: gltf-tree <FILE>");
}
}

200
vendor/gltf/src/accessor/mod.rs vendored Normal file

@@ -0,0 +1,200 @@
//! # Basic usage
//!
//! Visiting the accessors of a glTF asset.
//!
//! ```
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! # let gltf = gltf::Gltf::open("examples/Box.gltf")?;
//! for accessor in gltf.accessors() {
//! println!("Accessor #{}", accessor.index());
//! println!("offset: {:?}", accessor.offset());
//! println!("count: {}", accessor.count());
//! println!("data_type: {:?}", accessor.data_type());
//! println!("dimensions: {:?}", accessor.dimensions());
//! }
//! # Ok(())
//! # }
//! # fn main() {
//! # let _ = run().expect("runtime error");
//! # }
//! ```
//!
//! # Utility functions
//!
//! Reading the values from the `vec3` accessors of a glTF asset.
//!
//! ## Note
//!
//! The [`Iter`] utility is a low-level iterator intended for use in special
//! cases. The average user is expected to use reader abstractions such as
//! [`mesh::Reader`].
//!
//! [`Iter`]: struct.Iter.html
//! [`mesh::Reader`]: ../mesh/struct.Reader.html
//!
//! ```
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! # use gltf::accessor::{DataType, Dimensions, Iter};
//! let (gltf, buffers, _) = gltf::import("examples/Box.gltf")?;
//! let get_buffer_data = |buffer: gltf::Buffer| buffers.get(buffer.index()).map(|x| &*x.0);
//! for accessor in gltf.accessors() {
//! match (accessor.data_type(), accessor.dimensions()) {
//! (DataType::F32, Dimensions::Vec3) => {
//! if let Some(iter) = Iter::<[f32; 3]>::new(accessor, get_buffer_data) {
//! for item in iter {
//! println!("{:?}", item);
//! }
//! }
//! }
//! _ => {},
//! }
//! }
//! # Ok(())
//! # }
//! # fn main() {
//! # let _ = run().expect("runtime error");
//! # }
//! ```
use crate::{buffer, Document};
pub use json::accessor::ComponentType as DataType;
pub use json::accessor::Type as Dimensions;
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
/// Utility functions.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub mod util;
/// Contains data structures for sparse storage.
pub mod sparse;
#[cfg(feature = "utils")]
#[doc(inline)]
pub use self::util::{Item, Iter};
/// A typed view into a buffer view.
#[derive(Clone, Debug)]
pub struct Accessor<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::accessor::Accessor,
}
impl<'a> Accessor<'a> {
/// Constructs an `Accessor`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::accessor::Accessor,
) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns the size of each component that this accessor describes.
pub fn size(&self) -> usize {
self.data_type().size() * self.dimensions().multiplicity()
}
/// Returns the buffer view this accessor reads from.
///
/// This may be `None` if the corresponding accessor is sparse.
pub fn view(&self) -> Option<buffer::View<'a>> {
self.json
.buffer_view
.map(|view| self.document.views().nth(view.value()).unwrap())
}
/// Returns the offset relative to the start of the parent buffer view in bytes.
///
/// This will be 0 if the corresponding accessor is sparse.
pub fn offset(&self) -> usize {
// TODO: Change this function to return Option<usize> in the next
// version and return None for sparse accessors.
self.json.byte_offset.unwrap_or_default().0 as usize
}
/// Returns the number of components within the buffer view - not to be confused
/// with the number of bytes in the buffer view.
pub fn count(&self) -> usize {
self.json.count.0 as usize
}
/// Returns the data type of components in the attribute.
pub fn data_type(&self) -> DataType {
self.json.component_type.unwrap().0
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Specifies if the attribute is a scalar, vector, or matrix.
pub fn dimensions(&self) -> Dimensions {
self.json.type_.unwrap()
}
/// Returns the minimum value of each component in this attribute.
pub fn min(&self) -> Option<json::Value> {
self.json.min.clone()
}
/// Returns the maximum value of each component in this attribute.
pub fn max(&self) -> Option<json::Value> {
self.json.max.clone()
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Specifies whether integer data values should be normalized.
pub fn normalized(&self) -> bool {
self.json.normalized
}
/// Returns sparse storage of attributes that deviate from their initialization
/// value.
pub fn sparse(&self) -> Option<sparse::Sparse<'a>> {
self.json
.sparse
.as_ref()
.map(|json| sparse::Sparse::new(self.document, json))
}
}
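
A worked sketch of the arithmetic behind Accessor::size above (component size multiplied by the number of components per element), restated with plain numbers rather than the crate API; the helper function is invented for illustration.

fn element_size(component_bytes: usize, components_per_element: usize) -> usize {
    component_bytes * components_per_element
}

fn main() {
    assert_eq!(element_size(4, 3), 12);  // F32 Vec3: 4 bytes * 3 components
    assert_eq!(element_size(2, 16), 32); // U16 Mat4: 2 bytes * 16 components
}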

143
vendor/gltf/src/accessor/sparse.rs vendored Normal file

@@ -0,0 +1,143 @@
use crate::{buffer, Document};
/// The index data type.
#[derive(Clone, Debug)]
pub enum IndexType {
/// Corresponds to `GL_UNSIGNED_BYTE`.
U8 = 5121,
/// Corresponds to `GL_UNSIGNED_SHORT`.
U16 = 5123,
/// Corresponds to `GL_UNSIGNED_INT`.
U32 = 5125,
}
/// Indices of those attributes that deviate from their initialization value.
pub struct Indices<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::accessor::sparse::Indices,
}
impl<'a> Indices<'a> {
/// Constructs `sparse::Indices`.
pub(crate) fn new(document: &'a Document, json: &'a json::accessor::sparse::Indices) -> Self {
Self { document, json }
}
/// Returns the buffer view containing the sparse indices.
pub fn view(&self) -> buffer::View<'a> {
self.document
.views()
.nth(self.json.buffer_view.value())
.unwrap()
}
/// The offset relative to the start of the parent buffer view in bytes.
pub fn offset(&self) -> usize {
self.json.byte_offset.0 as usize
}
/// The data type of each index.
pub fn index_type(&self) -> IndexType {
match self.json.component_type.unwrap().0 {
json::accessor::ComponentType::U8 => IndexType::U8,
json::accessor::ComponentType::U16 => IndexType::U16,
json::accessor::ComponentType::U32 => IndexType::U32,
_ => unreachable!(),
}
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// Sparse storage of attributes that deviate from their initialization value.
pub struct Sparse<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::accessor::sparse::Sparse,
}
impl<'a> Sparse<'a> {
/// Constructs `Sparse`.
pub(crate) fn new(document: &'a Document, json: &'a json::accessor::sparse::Sparse) -> Self {
Self { document, json }
}
/// Returns the number of attributes encoded in this sparse accessor.
pub fn count(&self) -> usize {
self.json.count.0 as usize
}
/// Returns an index array of size `count` that points to those accessor
/// attributes that deviate from their initialization value.
pub fn indices(&self) -> Indices<'a> {
Indices::new(self.document, &self.json.indices)
}
/// Returns an array of size `count * number_of_components`, storing the
/// displaced accessor attributes pointed by `indices`.
pub fn values(&self) -> Values<'a> {
Values::new(self.document, &self.json.values)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// Array of size `count * number_of_components` storing the displaced accessor
/// attributes pointed by `accessor::sparse::Indices`.
pub struct Values<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::accessor::sparse::Values,
}
impl<'a> Values<'a> {
/// Constructs `sparse::Values`.
pub(crate) fn new(document: &'a Document, json: &'a json::accessor::sparse::Values) -> Self {
Self { document, json }
}
/// Returns the buffer view containing the sparse values.
pub fn view(&self) -> buffer::View<'a> {
self.document
.views()
.nth(self.json.buffer_view.value())
.unwrap()
}
/// The offset relative to the start of the parent buffer view in bytes.
pub fn offset(&self) -> usize {
self.json.byte_offset.0 as usize
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
impl IndexType {
/// Returns the number of bytes this value represents.
pub fn size(&self) -> usize {
use self::IndexType::*;
match *self {
U8 => 1,
U16 => 2,
U32 => 4,
}
}
}
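
A minimal sketch of the substitution rule this sparse storage describes: begin with the base values (or zeros when no base buffer view is set) and overwrite the positions named by indices() with the matching values(). Plain slices stand in for accessor data here; the numbers are illustrative and the snippet is not part of the vendored module.

fn apply_sparse(base: &mut [f32], indices: &[u32], values: &[f32]) {
    // Each sparse index selects an element that deviates from its
    // initialization value; the value at the same position replaces it.
    for (&i, &v) in indices.iter().zip(values.iter()) {
        base[i as usize] = v;
    }
}

fn main() {
    let mut base = vec![0.0_f32; 6];
    apply_sparse(&mut base, &[1, 4], &[0.5, -2.0]);
    assert_eq!(base, [0.0, 0.5, 0.0, 0.0, -2.0, 0.0]);
}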

454
vendor/gltf/src/accessor/util.rs vendored Normal file

@@ -0,0 +1,454 @@
use byteorder::{ByteOrder, LE};
use std::marker::PhantomData;
use std::{iter, mem};
use crate::{accessor, buffer};
fn buffer_view_slice<'a, 's>(
view: buffer::View<'a>,
get_buffer_data: &dyn Fn(buffer::Buffer<'a>) -> Option<&'s [u8]>,
) -> Option<&'s [u8]> {
let start = view.offset();
let end = start + view.length();
get_buffer_data(view.buffer()).and_then(|slice| slice.get(start..end))
}
/// General iterator for an accessor.
#[derive(Clone, Debug)]
pub enum Iter<'a, T: Item> {
/// Standard accessor iterator.
Standard(ItemIter<'a, T>),
/// Iterator for accessor with sparse values.
Sparse(SparseIter<'a, T>),
}
impl<'a, T: Item> Iterator for Iter<'a, T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
match self {
Iter::Standard(ref mut iter) => iter.next(),
Iter::Sparse(ref mut iter) => iter.next(),
}
}
fn nth(&mut self, nth: usize) -> Option<Self::Item> {
match self {
Iter::Standard(ref mut iter) => iter.nth(nth),
Iter::Sparse(ref mut iter) => iter.nth(nth),
}
}
fn last(self) -> Option<Self::Item> {
match self {
Iter::Standard(iter) => iter.last(),
Iter::Sparse(iter) => iter.last(),
}
}
fn count(self) -> usize {
match self {
Iter::Standard(iter) => iter.count(),
Iter::Sparse(iter) => iter.count(),
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
match self {
Iter::Standard(ref iter) => iter.size_hint(),
Iter::Sparse(ref iter) => iter.size_hint(),
}
}
}
impl<'a, T: Item> ExactSizeIterator for Iter<'a, T> {}
/// Iterator over indices of sparse accessor.
#[derive(Clone, Debug)]
pub enum SparseIndicesIter<'a> {
/// 8-bit indices.
U8(ItemIter<'a, u8>),
/// 16-bit indices.
U16(ItemIter<'a, u16>),
/// 32-bit indices.
U32(ItemIter<'a, u32>),
}
impl<'a> Iterator for SparseIndicesIter<'a> {
type Item = u32;
fn next(&mut self) -> Option<Self::Item> {
match *self {
SparseIndicesIter::U8(ref mut iter) => iter.next().map(|x| x as u32),
SparseIndicesIter::U16(ref mut iter) => iter.next().map(|x| x as u32),
SparseIndicesIter::U32(ref mut iter) => iter.next(),
}
}
}
/// Iterates over a sparse accessor.
#[derive(Clone, Debug)]
pub struct SparseIter<'a, T: Item> {
/// Base value iterator.
///
/// This can be `None` if the base buffer view is not set. In this case the base values are all zero.
base: Option<ItemIter<'a, T>>,
/// Number of values in the base accessor
///
/// Valid even when `base` is not set.
base_count: usize,
/// Sparse indices iterator.
indices: iter::Peekable<SparseIndicesIter<'a>>,
/// Sparse values iterator.
values: ItemIter<'a, T>,
/// Iterator counter.
counter: u32,
}
impl<'a, T: Item> SparseIter<'a, T> {
/// Constructor.
///
/// Here `base` is allowed to be `None` when the base buffer view is not explicitly specified.
pub fn new(
base: Option<ItemIter<'a, T>>,
indices: SparseIndicesIter<'a>,
values: ItemIter<'a, T>,
) -> Self {
Self::with_base_count(base, 0, indices, values)
}
/// Supplemental constructor.
pub fn with_base_count(
base: Option<ItemIter<'a, T>>,
base_count: usize,
indices: SparseIndicesIter<'a>,
values: ItemIter<'a, T>,
) -> Self {
Self {
base,
base_count,
indices: indices.peekable(),
values,
counter: 0,
}
}
}
impl<'a, T: Item> Iterator for SparseIter<'a, T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
let mut next_value = if let Some(base) = self.base.as_mut() {
// If accessor.bufferView is set we let base decide when we have reached the end
// of the iteration sequence.
base.next()?
} else if (self.counter as usize) < self.base_count {
// Else, we continue iterating until we have generated the number of items
// specified by accessor.count
T::zero()
} else {
return None;
};
let next_sparse_index = self.indices.peek();
if let Some(index) = next_sparse_index {
if *index == self.counter {
self.indices.next(); // advance
next_value = self.values.next().unwrap();
}
}
self.counter += 1;
Some(next_value)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let hint = self.base_count - (self.counter as usize).min(self.base_count);
(hint, Some(hint))
}
}
impl<'a, T: Item> ExactSizeIterator for SparseIter<'a, T> {}
/// Represents items that can be read by an [`Accessor`].
///
/// [`Accessor`]: struct.Accessor.html
pub trait Item {
/// Create an object of this type from a byte slice.
fn from_slice(slice: &[u8]) -> Self;
/// Create an object of this type that represents a zero value.
fn zero() -> Self;
}
/// Visits the items in an [`Accessor`].
///
/// [`Accessor`]: struct.Accessor.html
#[derive(Copy, Clone, Debug)]
pub struct ItemIter<'a, T: Item> {
stride: usize,
data: &'a [u8],
_phantom: PhantomData<T>,
}
impl Item for i8 {
fn from_slice(slice: &[u8]) -> Self {
slice[0] as i8
}
fn zero() -> Self {
0
}
}
impl Item for i16 {
fn from_slice(slice: &[u8]) -> Self {
LE::read_i16(slice)
}
fn zero() -> Self {
0
}
}
impl Item for u8 {
fn from_slice(slice: &[u8]) -> Self {
slice[0]
}
fn zero() -> Self {
0
}
}
impl Item for u16 {
fn from_slice(slice: &[u8]) -> Self {
LE::read_u16(slice)
}
fn zero() -> Self {
0
}
}
impl Item for u32 {
fn from_slice(slice: &[u8]) -> Self {
LE::read_u32(slice)
}
fn zero() -> Self {
0
}
}
impl Item for f32 {
fn from_slice(slice: &[u8]) -> Self {
LE::read_f32(slice)
}
fn zero() -> Self {
0.0
}
}
impl<T: Item + Copy> Item for [T; 2] {
fn from_slice(slice: &[u8]) -> Self {
assert!(slice.len() >= 2 * mem::size_of::<T>());
[
T::from_slice(slice),
T::from_slice(&slice[mem::size_of::<T>()..]),
]
}
fn zero() -> Self {
[T::zero(); 2]
}
}
impl<T: Item + Copy> Item for [T; 3] {
fn from_slice(slice: &[u8]) -> Self {
assert!(slice.len() >= 3 * mem::size_of::<T>());
[
T::from_slice(slice),
T::from_slice(&slice[mem::size_of::<T>()..]),
T::from_slice(&slice[2 * mem::size_of::<T>()..]),
]
}
fn zero() -> Self {
[T::zero(); 3]
}
}
impl<T: Item + Copy> Item for [T; 4] {
fn from_slice(slice: &[u8]) -> Self {
assert!(slice.len() >= 4 * mem::size_of::<T>());
[
T::from_slice(slice),
T::from_slice(&slice[mem::size_of::<T>()..]),
T::from_slice(&slice[2 * mem::size_of::<T>()..]),
T::from_slice(&slice[3 * mem::size_of::<T>()..]),
]
}
fn zero() -> Self {
[T::zero(); 4]
}
}
impl<'a, T: Item> ItemIter<'a, T> {
/// Constructor.
pub fn new(slice: &'a [u8], stride: usize) -> Self {
ItemIter {
data: slice,
stride,
_phantom: PhantomData,
}
}
}
impl<'a, 's, T: Item> Iter<'s, T> {
/// Constructor.
pub fn new<F>(accessor: super::Accessor<'a>, get_buffer_data: F) -> Option<Iter<'s, T>>
where
F: Clone + Fn(buffer::Buffer<'a>) -> Option<&'s [u8]>,
{
match accessor.sparse() {
Some(sparse) => {
// Using `if let` here instead of map to preserve the early return behavior.
let base_iter = if let Some(view) = accessor.view() {
let stride = view.stride().unwrap_or(mem::size_of::<T>());
let start = accessor.offset();
let end = start + stride * (accessor.count() - 1) + mem::size_of::<T>();
let subslice = buffer_view_slice(view, &get_buffer_data)
.and_then(|slice| slice.get(start..end))?;
Some(ItemIter::new(subslice, stride))
} else {
None
};
let base_count = accessor.count();
let indices = sparse.indices();
let values = sparse.values();
let sparse_count = sparse.count();
let index_iter = {
let view = indices.view();
let index_size = indices.index_type().size();
let stride = view.stride().unwrap_or(index_size);
let start = indices.offset();
let end = start + stride * (sparse_count - 1) + index_size;
let subslice = buffer_view_slice(view, &get_buffer_data)
.and_then(|slice| slice.get(start..end))?;
match indices.index_type() {
accessor::sparse::IndexType::U8 => {
SparseIndicesIter::U8(ItemIter::new(subslice, stride))
}
accessor::sparse::IndexType::U16 => {
SparseIndicesIter::U16(ItemIter::new(subslice, stride))
}
accessor::sparse::IndexType::U32 => {
SparseIndicesIter::U32(ItemIter::new(subslice, stride))
}
}
};
let value_iter = {
let view = values.view();
let stride = view.stride().unwrap_or(mem::size_of::<T>());
let start = values.offset();
let end = start + stride * (sparse_count - 1) + mem::size_of::<T>();
let subslice = buffer_view_slice(view, &get_buffer_data)
.and_then(|slice| slice.get(start..end))?;
ItemIter::new(subslice, stride)
};
Some(Iter::Sparse(SparseIter::with_base_count(
base_iter, base_count, index_iter, value_iter,
)))
}
None => {
debug_assert_eq!(mem::size_of::<T>(), accessor.size());
debug_assert!(mem::size_of::<T>() > 0);
accessor.view().and_then(|view| {
let stride = view.stride().unwrap_or(mem::size_of::<T>());
debug_assert!(
stride >= mem::size_of::<T>(),
"Mismatch in stride, expected at least {} stride but found {}",
mem::size_of::<T>(),
stride
);
let start = accessor.offset();
let end = start + stride * (accessor.count() - 1) + mem::size_of::<T>();
let subslice = buffer_view_slice(view, &get_buffer_data)
.and_then(|slice| slice.get(start..end))?;
Some(Iter::Standard(ItemIter {
stride,
data: subslice,
_phantom: PhantomData,
}))
})
}
}
}
}
impl<'a, T: Item> ExactSizeIterator for ItemIter<'a, T> {}
impl<'a, T: Item> Iterator for ItemIter<'a, T> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
let stride = if self.data.len() >= self.stride {
Some(self.stride)
} else if self.data.len() >= mem::size_of::<T>() {
Some(mem::size_of::<T>())
} else {
None
};
if let Some(stride) = stride {
let (val, data) = self.data.split_at(stride);
let val = T::from_slice(val);
self.data = data;
Some(val)
} else {
None
}
}
fn nth(&mut self, nth: usize) -> Option<Self::Item> {
if let Some(val_data) = self.data.get(nth * self.stride..) {
if val_data.len() >= mem::size_of::<T>() {
let val = T::from_slice(val_data);
self.data = &val_data[self.stride.min(val_data.len())..];
Some(val)
} else {
None
}
} else {
None
}
}
fn last(self) -> Option<Self::Item> {
if self.data.len() >= mem::size_of::<T>() {
self.data
.get((self.data.len() - 1) / self.stride * self.stride..)
.map(T::from_slice)
} else {
None
}
}
fn count(self) -> usize {
self.size_hint().0
}
fn size_hint(&self) -> (usize, Option<usize>) {
let hint = self.data.len() / self.stride
+ (self.data.len() % self.stride >= mem::size_of::<T>()) as usize;
(hint, Some(hint))
}
}
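
A minimal sketch of the strided, little-endian decode that ItemIter performs above: each element begins `stride` bytes after the previous one and only its leading size_of::<T>() bytes are read, matching the Item impls in this file. byteorder is already part of this dependency tree; the buffer layout below is invented for the example.

use byteorder::{ByteOrder, LE};

fn read_strided_f32(data: &[u8], stride: usize, count: usize) -> Vec<f32> {
    (0..count)
        .map(|i| LE::read_f32(&data[i * stride..i * stride + 4]))
        .collect()
}

fn main() {
    // Two f32 values, each followed by 4 bytes of padding (stride = 8).
    let mut data = vec![0u8; 16];
    LE::write_f32(&mut data[0..4], 1.0);
    LE::write_f32(&mut data[8..12], 2.5);
    assert_eq!(read_strided_f32(&data, 8, 2), vec![1.0, 2.5]);
}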

75
vendor/gltf/src/animation/iter.rs vendored Normal file

@@ -0,0 +1,75 @@
use std::{iter, slice};
use crate::animation::{Animation, Channel, Sampler};
/// An `Iterator` that visits the channels of an animation.
#[derive(Clone, Debug)]
pub struct Channels<'a> {
/// The parent `Animation` struct.
pub(crate) anim: Animation<'a>,
/// The internal channel iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::animation::Channel>>,
}
/// An `Iterator` that visits the samplers of an animation.
#[derive(Clone, Debug)]
pub struct Samplers<'a> {
/// The parent `Animation` struct.
pub(crate) anim: Animation<'a>,
/// The internal sampler iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::animation::Sampler>>,
}
impl<'a> Iterator for Channels<'a> {
type Item = Channel<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Channel::new(self.anim.clone(), json, index))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let anim = self.anim;
self.iter
.last()
.map(|(index, json)| Channel::new(anim, json, index))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Channel::new(self.anim.clone(), json, index))
}
}
impl<'a> Iterator for Samplers<'a> {
type Item = Sampler<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Sampler::new(self.anim.clone(), json, index))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let anim = self.anim;
self.iter
.last()
.map(|(index, json)| Sampler::new(anim, json, index))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Sampler::new(self.anim.clone(), json, index))
}
}
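
A minimal consumption sketch for these iterators, assuming an already-loaded gltf::Document: walk each animation's channels and report which node property each one drives. The function name is invented for illustration and is not part of the vendored crate.

fn list_channel_targets(document: &gltf::Document) {
    for animation in document.animations() {
        for channel in animation.channels() {
            let target = channel.target();
            println!(
                "animation {} drives node {} ({:?})",
                animation.index(),
                target.node().index(),
                target.property()
            );
        }
    }
}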

268
vendor/gltf/src/animation/mod.rs vendored Normal file

@@ -0,0 +1,268 @@
use crate::{accessor, scene, Document};
#[cfg(feature = "utils")]
use crate::Buffer;
pub use json::animation::{Interpolation, Property};
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
/// Iterators.
pub mod iter;
/// Utility functions.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub mod util;
#[cfg(feature = "utils")]
#[doc(inline)]
pub use self::util::Reader;
/// A keyframe animation.
#[derive(Clone, Debug)]
pub struct Animation<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::animation::Animation,
}
/// Targets an animation's sampler at a node's property.
#[derive(Clone, Debug)]
pub struct Channel<'a> {
/// The parent `Animation` struct.
anim: Animation<'a>,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::animation::Channel,
}
/// Defines a keyframe graph (but not its target).
#[derive(Clone, Debug)]
pub struct Sampler<'a> {
/// The parent `Animation` struct.
anim: Animation<'a>,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::animation::Sampler,
}
/// The node and TRS property that an animation channel targets.
#[derive(Clone, Debug)]
pub struct Target<'a> {
/// The parent `Animation` struct.
anim: Animation<'a>,
/// The corresponding JSON struct.
json: &'a json::animation::Target,
}
impl<'a> Animation<'a> {
/// Constructs an `Animation`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::animation::Animation,
) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Returns an `Iterator` over the animation channels.
///
/// Each channel targets an animation's sampler at a node's property.
pub fn channels(&self) -> iter::Channels<'a> {
iter::Channels {
anim: self.clone(),
iter: self.json.channels.iter().enumerate(),
}
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns an `Iterator` over the animation samplers.
///
/// Each sampler combines input and output accessors with an
/// interpolation algorithm to define a keyframe graph (but not its target).
pub fn samplers(&self) -> iter::Samplers<'a> {
iter::Samplers {
anim: self.clone(),
iter: self.json.samplers.iter().enumerate(),
}
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
}
impl<'a> Channel<'a> {
/// Constructs a `Channel`.
pub(crate) fn new(
anim: Animation<'a>,
json: &'a json::animation::Channel,
index: usize,
) -> Self {
Self { anim, json, index }
}
/// Returns the parent `Animation` struct.
pub fn animation(&self) -> Animation<'a> {
self.anim.clone()
}
/// Returns the sampler in this animation used to compute the value for the
/// target.
pub fn sampler(&self) -> Sampler<'a> {
self.anim.samplers().nth(self.json.sampler.value()).unwrap()
}
/// Returns the node and property to target.
pub fn target(&self) -> Target<'a> {
Target::new(self.anim.clone(), &self.json.target)
}
/// Constructs an animation channel reader.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub fn reader<'s, F>(&self, get_buffer_data: F) -> Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
Reader {
channel: self.clone(),
get_buffer_data,
}
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
}
impl<'a> Target<'a> {
/// Constructs a `Target`.
pub(crate) fn new(anim: Animation<'a>, json: &'a json::animation::Target) -> Self {
Self { anim, json }
}
/// Returns the parent `Animation` struct.
pub fn animation(&self) -> Animation<'a> {
self.anim.clone()
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Returns the target node.
pub fn node(&self) -> scene::Node<'a> {
self.anim
.document
.nodes()
.nth(self.json.node.value())
.unwrap()
}
/// Returns the node's property to modify or the 'weights' of the morph
/// targets it instantiates.
pub fn property(&self) -> Property {
self.json.path.unwrap()
}
}
impl<'a> Sampler<'a> {
/// Constructs a `Sampler`.
pub(crate) fn new(
anim: Animation<'a>,
json: &'a json::animation::Sampler,
index: usize,
) -> Self {
Self { anim, json, index }
}
/// Returns the parent `Animation` struct.
pub fn animation(&self) -> Animation<'a> {
self.anim.clone()
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns the accessor containing the keyframe input values (e.g. time).
pub fn input(&self) -> accessor::Accessor<'a> {
self.anim
.document
.accessors()
.nth(self.json.input.value())
.unwrap()
}
/// Returns the keyframe interpolation algorithm.
pub fn interpolation(&self) -> Interpolation {
self.json.interpolation.unwrap()
}
/// Returns the accessor containing the keyframe output values.
pub fn output(&self) -> accessor::Accessor<'a> {
self.anim
.document
.accessors()
.nth(self.json.output.value())
.unwrap()
}
}
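// Illustrative sketch, not part of the vendored crate: walking the animation
// API defined above. Assumes a `gltf::Document` obtained elsewhere (e.g. via
// `gltf::import`); the function name is hypothetical.
fn print_animation_summary(document: &gltf::Document) {
    for animation in document.animations() {
        for channel in animation.channels() {
            let target = channel.target();
            let sampler = channel.sampler();
            // Each channel drives one property of one node via one sampler.
            println!(
                "animation {} -> node {}: {:?} ({:?})",
                animation.index(),
                target.node().index(),
                target.property(),
                sampler.interpolation(),
            );
        }
    }
}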

188
vendor/gltf/src/animation/util/mod.rs vendored Normal file
View File

@@ -0,0 +1,188 @@
/// Casting iterator adapters for rotations.
pub mod rotations;
/// Casting iterator adapters for morph target weights.
pub mod morph_target_weights;
use crate::accessor;
use crate::animation::Channel;
use crate::Buffer;
/// Animation input sampler values of type `f32`.
pub type ReadInputs<'a> = accessor::Iter<'a, f32>;
/// Animation output sampler values of type `[f32; 3]`.
pub type Translations<'a> = accessor::Iter<'a, [f32; 3]>;
/// Animation output sampler values of type `[f32; 3]`.
pub type Scales<'a> = accessor::Iter<'a, [f32; 3]>;
/// Animation channel reader.
#[derive(Clone, Debug)]
pub struct Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
pub(crate) channel: Channel<'a>,
pub(crate) get_buffer_data: F,
}
/// Rotation animations.
#[derive(Clone, Debug)]
pub enum Rotations<'a> {
/// Rotations of type `[i8; 4]`.
I8(accessor::Iter<'a, [i8; 4]>),
/// Rotations of type `[u8; 4]`.
U8(accessor::Iter<'a, [u8; 4]>),
/// Rotations of type `[i16; 4]`.
I16(accessor::Iter<'a, [i16; 4]>),
/// Rotations of type `[u16; 4]`.
U16(accessor::Iter<'a, [u16; 4]>),
/// Rotations of type `[f32; 4]`.
F32(accessor::Iter<'a, [f32; 4]>),
}
/// Morph-target weight animations.
#[derive(Clone, Debug)]
pub enum MorphTargetWeights<'a> {
/// Weights of type `i8`.
I8(accessor::Iter<'a, i8>),
/// Weights of type `u8`.
U8(accessor::Iter<'a, u8>),
/// Weights of type `i16`.
I16(accessor::Iter<'a, i16>),
/// Weights of type `u16`.
U16(accessor::Iter<'a, u16>),
/// Weights of type `f32`.
F32(accessor::Iter<'a, f32>),
}
/// Animation output sampler values.
pub enum ReadOutputs<'a> {
/// XYZ translations of type `[f32; 3]`.
Translations(Translations<'a>),
/// Rotation animations.
Rotations(Rotations<'a>),
/// XYZ scales of type `[f32; 3]`.
Scales(Scales<'a>),
/// Morph target animations.
MorphTargetWeights(MorphTargetWeights<'a>),
}
impl<'a> Rotations<'a> {
/// Reinterpret rotations as i8. Lossy if underlying iterator yields u8,
/// i16, u16 or f32.
pub fn into_i8(self) -> rotations::CastingIter<'a, rotations::I8> {
rotations::CastingIter::new(self)
}
/// Reinterpret rotations as u8. Lossy if underlying iterator yields i16,
/// u16 or f32.
pub fn into_u8(self) -> rotations::CastingIter<'a, rotations::U8> {
rotations::CastingIter::new(self)
}
/// Reinterpret rotations as i16. Lossy if underlying iterator yields u16
/// or f32.
pub fn into_i16(self) -> rotations::CastingIter<'a, rotations::I16> {
rotations::CastingIter::new(self)
}
/// Reinterpret rotations as u16. Lossy if underlying iterator yields f32.
pub fn into_u16(self) -> rotations::CastingIter<'a, rotations::U16> {
rotations::CastingIter::new(self)
}
/// Reinterpret rotations as f32. Lossy if underlying iterator yields i16
/// or u16.
pub fn into_f32(self) -> rotations::CastingIter<'a, rotations::F32> {
rotations::CastingIter::new(self)
}
}
impl<'a> MorphTargetWeights<'a> {
/// Reinterpret morph weights as i8. Lossy if underlying iterator yields
/// u8, i16, u16 or f32.
pub fn into_i8(self) -> morph_target_weights::CastingIter<'a, morph_target_weights::I8> {
morph_target_weights::CastingIter::new(self)
}
/// Reinterpret morph weights as u8. Lossy if underlying iterator yields
/// i16, u16 or f32.
pub fn into_u8(self) -> morph_target_weights::CastingIter<'a, morph_target_weights::U8> {
morph_target_weights::CastingIter::new(self)
}
/// Reinterpret morph weights as i16. Lossy if underlying iterator yields
/// u16 or f32.
pub fn into_i16(self) -> morph_target_weights::CastingIter<'a, morph_target_weights::I16> {
morph_target_weights::CastingIter::new(self)
}
/// Reinterpret morph weights as u16. Lossy if underlying iterator yields
/// f32.
pub fn into_u16(self) -> morph_target_weights::CastingIter<'a, morph_target_weights::U16> {
morph_target_weights::CastingIter::new(self)
}
/// Reinterpret morph weights as f32. Lossy if underlying iterator yields
/// i16 or u16.
pub fn into_f32(self) -> morph_target_weights::CastingIter<'a, morph_target_weights::F32> {
morph_target_weights::CastingIter::new(self)
}
}
impl<'a, 's, F> Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
/// Visits the input samples of a channel.
pub fn read_inputs(&self) -> Option<ReadInputs<'s>> {
accessor::Iter::new(self.channel.sampler().input(), self.get_buffer_data.clone())
}
/// Visits the output samples of a channel.
pub fn read_outputs(&self) -> Option<ReadOutputs<'s>> {
use crate::animation::Property;
use accessor::{DataType, Iter};
let output = self.channel.sampler().output();
match self.channel.target().property() {
Property::Translation => {
Iter::new(output, self.get_buffer_data.clone()).map(ReadOutputs::Translations)
}
Property::Rotation => match output.data_type() {
DataType::I8 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::Rotations(Rotations::I8(x))),
DataType::U8 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::Rotations(Rotations::U8(x))),
DataType::I16 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::Rotations(Rotations::I16(x))),
DataType::U16 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::Rotations(Rotations::U16(x))),
DataType::F32 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::Rotations(Rotations::F32(x))),
_ => unreachable!(),
},
Property::Scale => {
Iter::new(output, self.get_buffer_data.clone()).map(ReadOutputs::Scales)
}
Property::MorphTargetWeights => match output.data_type() {
DataType::I8 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::MorphTargetWeights(MorphTargetWeights::I8(x))),
DataType::U8 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::MorphTargetWeights(MorphTargetWeights::U8(x))),
DataType::I16 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::MorphTargetWeights(MorphTargetWeights::I16(x))),
DataType::U16 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::MorphTargetWeights(MorphTargetWeights::U16(x))),
DataType::F32 => Iter::new(output, self.get_buffer_data.clone())
.map(|x| ReadOutputs::MorphTargetWeights(MorphTargetWeights::F32(x))),
_ => unreachable!(),
},
}
}
}
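// Illustrative sketch, not part of the vendored crate: reading keyframes with
// the `Reader` defined above. Assumes the `utils` and `import` features and
// buffer data loaded by `gltf::import`; the names are hypothetical.
fn print_keyframes(animation: &gltf::animation::Animation, buffers: &[gltf::buffer::Data]) {
    use gltf::animation::util::ReadOutputs;
    for channel in animation.channels() {
        let reader = channel.reader(|buffer| buffers.get(buffer.index()).map(|data| &data.0[..]));
        if let Some(inputs) = reader.read_inputs() {
            // Keyframe timestamps in seconds.
            let times: Vec<f32> = inputs.collect();
            println!("{} keyframes: {:?}", times.len(), times);
        }
        match reader.read_outputs() {
            Some(ReadOutputs::Translations(translations)) => {
                for t in translations {
                    println!("translation {:?}", t);
                }
            }
            Some(ReadOutputs::Rotations(rotations)) => {
                // Normalize whatever the stored component type is to `[f32; 4]`.
                for q in rotations.into_f32() {
                    println!("rotation {:?}", q);
                }
            }
            _ => {}
        }
    }
}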

231
vendor/gltf/src/animation/util/morph_target_weights.rs vendored Normal file
View File

@@ -0,0 +1,231 @@
use super::MorphTargetWeights;
use crate::Normalize;
use std::marker::PhantomData;
/// Casting iterator for `MorphTargetWeights`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(MorphTargetWeights<'a>, PhantomData<T>);
/// Type which describes how to cast any weight into i8.
#[derive(Clone, Debug)]
pub struct I8;
/// Type which describes how to cast any weight into u8.
#[derive(Clone, Debug)]
pub struct U8;
/// Type which describes how to cast any weight into i16.
#[derive(Clone, Debug)]
pub struct I16;
/// Type which describes how to cast any weight into u16.
#[derive(Clone, Debug)]
pub struct U16;
/// Type which describes how to cast any weight into f32.
#[derive(Clone, Debug)]
pub struct F32;
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from i8.
fn cast_i8(x: i8) -> Self::Output;
/// Cast from u8.
fn cast_u8(x: u8) -> Self::Output;
/// Cast from i16.
fn cast_i16(x: i16) -> Self::Output;
/// Cast from u16.
fn cast_u16(x: u16) -> Self::Output;
/// Cast from f32.
fn cast_f32(x: f32) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: MorphTargetWeights<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `MorphTargetWeights` object.
pub fn unwrap(self) -> MorphTargetWeights<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
MorphTargetWeights::I8(ref mut i) => i.next().map(A::cast_i8),
MorphTargetWeights::U8(ref mut i) => i.next().map(A::cast_u8),
MorphTargetWeights::I16(ref mut i) => i.next().map(A::cast_i16),
MorphTargetWeights::U16(ref mut i) => i.next().map(A::cast_u16),
MorphTargetWeights::F32(ref mut i) => i.next().map(A::cast_f32),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
MorphTargetWeights::I8(ref mut i) => i.nth(x).map(A::cast_i8),
MorphTargetWeights::U8(ref mut i) => i.nth(x).map(A::cast_u8),
MorphTargetWeights::I16(ref mut i) => i.nth(x).map(A::cast_i16),
MorphTargetWeights::U16(ref mut i) => i.nth(x).map(A::cast_u16),
MorphTargetWeights::F32(ref mut i) => i.nth(x).map(A::cast_f32),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
MorphTargetWeights::I8(i) => i.last().map(A::cast_i8),
MorphTargetWeights::U8(i) => i.last().map(A::cast_u8),
MorphTargetWeights::I16(i) => i.last().map(A::cast_i16),
MorphTargetWeights::U16(i) => i.last().map(A::cast_u16),
MorphTargetWeights::F32(i) => i.last().map(A::cast_f32),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
MorphTargetWeights::I8(ref i) => i.size_hint(),
MorphTargetWeights::U8(ref i) => i.size_hint(),
MorphTargetWeights::I16(ref i) => i.size_hint(),
MorphTargetWeights::U16(ref i) => i.size_hint(),
MorphTargetWeights::F32(ref i) => i.size_hint(),
}
}
}
impl Cast for I8 {
type Output = i8;
fn cast_i8(x: i8) -> Self::Output {
x.normalize()
}
fn cast_u8(x: u8) -> Self::Output {
x.normalize()
}
fn cast_i16(x: i16) -> Self::Output {
x.normalize()
}
fn cast_u16(x: u16) -> Self::Output {
x.normalize()
}
fn cast_f32(x: f32) -> Self::Output {
x.normalize()
}
}
impl Cast for U8 {
type Output = u8;
fn cast_i8(x: i8) -> Self::Output {
x.normalize()
}
fn cast_u8(x: u8) -> Self::Output {
x.normalize()
}
fn cast_i16(x: i16) -> Self::Output {
x.normalize()
}
fn cast_u16(x: u16) -> Self::Output {
x.normalize()
}
fn cast_f32(x: f32) -> Self::Output {
x.normalize()
}
}
impl Cast for I16 {
type Output = i16;
fn cast_i8(x: i8) -> Self::Output {
x.normalize()
}
fn cast_u8(x: u8) -> Self::Output {
x.normalize()
}
fn cast_i16(x: i16) -> Self::Output {
x.normalize()
}
fn cast_u16(x: u16) -> Self::Output {
x.normalize()
}
fn cast_f32(x: f32) -> Self::Output {
x.normalize()
}
}
impl Cast for U16 {
type Output = u16;
fn cast_i8(x: i8) -> Self::Output {
x.normalize()
}
fn cast_u8(x: u8) -> Self::Output {
x.normalize()
}
fn cast_i16(x: i16) -> Self::Output {
x.normalize()
}
fn cast_u16(x: u16) -> Self::Output {
x.normalize()
}
fn cast_f32(x: f32) -> Self::Output {
x.normalize()
}
}
impl Cast for F32 {
type Output = f32;
fn cast_i8(x: i8) -> Self::Output {
x.normalize()
}
fn cast_u8(x: u8) -> Self::Output {
x.normalize()
}
fn cast_i16(x: i16) -> Self::Output {
x.normalize()
}
fn cast_u16(x: u16) -> Self::Output {
x.normalize()
}
fn cast_f32(x: f32) -> Self::Output {
x.normalize()
}
}
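// Illustrative sketch, not part of the vendored crate: the public entry point
// to the casting iterator above. `weights` is assumed to come from
// `ReadOutputs::MorphTargetWeights` in an animation `Reader`.
fn collect_weights(weights: gltf::animation::util::MorphTargetWeights) -> Vec<f32> {
    // Whatever the stored component type, `into_f32` yields normalized `f32`s.
    weights.into_f32().collect()
}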

231
vendor/gltf/src/animation/util/rotations.rs vendored Normal file
View File

@@ -0,0 +1,231 @@
use super::Rotations;
use crate::Normalize;
use std::marker::PhantomData;
/// Casting iterator for `Rotations`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(Rotations<'a>, PhantomData<T>);
/// Type which describes how to cast any rotation into i8.
#[derive(Clone, Debug)]
pub struct I8;
/// Type which describes how to cast any rotation into u8.
#[derive(Clone, Debug)]
pub struct U8;
/// Type which describes how to cast any rotation into i16.
#[derive(Clone, Debug)]
pub struct I16;
/// Type which describes how to cast any rotation into u16.
#[derive(Clone, Debug)]
pub struct U16;
/// Type which describes how to cast any rotation into f32.
#[derive(Clone, Debug)]
pub struct F32;
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from i8.
fn cast_i8(x: [i8; 4]) -> Self::Output;
/// Cast from u8.
fn cast_u8(x: [u8; 4]) -> Self::Output;
/// Cast from i16.
fn cast_i16(x: [i16; 4]) -> Self::Output;
/// Cast from u16.
fn cast_u16(x: [u16; 4]) -> Self::Output;
/// Cast from f32.
fn cast_f32(x: [f32; 4]) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: Rotations<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `Rotations` object.
pub fn unwrap(self) -> Rotations<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
Rotations::I8(ref mut i) => i.next().map(A::cast_i8),
Rotations::U8(ref mut i) => i.next().map(A::cast_u8),
Rotations::I16(ref mut i) => i.next().map(A::cast_i16),
Rotations::U16(ref mut i) => i.next().map(A::cast_u16),
Rotations::F32(ref mut i) => i.next().map(A::cast_f32),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
Rotations::I8(ref mut i) => i.nth(x).map(A::cast_i8),
Rotations::U8(ref mut i) => i.nth(x).map(A::cast_u8),
Rotations::I16(ref mut i) => i.nth(x).map(A::cast_i16),
Rotations::U16(ref mut i) => i.nth(x).map(A::cast_u16),
Rotations::F32(ref mut i) => i.nth(x).map(A::cast_f32),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
Rotations::I8(i) => i.last().map(A::cast_i8),
Rotations::U8(i) => i.last().map(A::cast_u8),
Rotations::I16(i) => i.last().map(A::cast_i16),
Rotations::U16(i) => i.last().map(A::cast_u16),
Rotations::F32(i) => i.last().map(A::cast_f32),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
Rotations::I8(ref i) => i.size_hint(),
Rotations::U8(ref i) => i.size_hint(),
Rotations::I16(ref i) => i.size_hint(),
Rotations::U16(ref i) => i.size_hint(),
Rotations::F32(ref i) => i.size_hint(),
}
}
}
impl Cast for I8 {
type Output = [i8; 4];
fn cast_i8(x: [i8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_i16(x: [i16; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
impl Cast for U8 {
type Output = [u8; 4];
fn cast_i8(x: [i8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_i16(x: [i16; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
impl Cast for I16 {
type Output = [i16; 4];
fn cast_i8(x: [i8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_i16(x: [i16; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
impl Cast for U16 {
type Output = [u16; 4];
fn cast_i8(x: [i8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_i16(x: [i16; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
impl Cast for F32 {
type Output = [f32; 4];
fn cast_i8(x: [i8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_i16(x: [i16; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
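// Illustrative sketch, not part of the vendored crate: normalizing stored
// rotations to `[f32; 4]` quaternions via the casting iterator above.
// `rotations` is assumed to come from `ReadOutputs::Rotations`.
fn collect_quaternions(rotations: gltf::animation::util::Rotations) -> Vec<[f32; 4]> {
    rotations.into_f32().collect()
}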

328
vendor/gltf/src/binary.rs vendored Normal file
View File

@@ -0,0 +1,328 @@
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::borrow::Cow;
use std::{fmt, io, mem};
/// Represents a Glb loader error.
#[derive(Debug)]
pub enum Error {
/// I/O error occurred.
Io(::std::io::Error),
/// Unsupported version.
Version(u32),
/// Magic bytes say that the file is not glTF.
Magic([u8; 4]),
/// Length specified in GLB header exceeds that of the slice.
Length {
/// Length specified in GLB header.
length: u32,
/// Actual length of data read.
length_read: usize,
},
/// Stream ended before we could read the chunk.
ChunkLength {
/// The chunk type being read when the error occurred.
ty: ChunkType,
/// The chunk length specified in the chunk header.
length: u32,
/// Actual length of data read.
length_read: usize,
},
/// Chunk of this chunkType was not expected.
ChunkType(ChunkType),
/// Unknown chunk type.
UnknownChunkType([u8; 4]),
}
/// Binary glTF contents.
#[derive(Clone, Debug)]
pub struct Glb<'a> {
/// The header section of the `.glb` file.
pub header: Header,
/// The JSON section of the `.glb` file.
pub json: Cow<'a, [u8]>,
/// The optional BIN section of the `.glb` file.
pub bin: Option<Cow<'a, [u8]>>,
}
/// The header section of a .glb file.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct Header {
/// Must be `b"glTF"`.
pub magic: [u8; 4],
/// Must be `2`.
pub version: u32,
/// Must match the length of the parent .glb file.
pub length: u32,
}
/// GLB chunk type.
#[derive(Copy, Clone, Debug)]
pub enum ChunkType {
/// `JSON` chunk.
Json,
/// `BIN` chunk.
Bin,
}
/// Chunk header with no data read yet.
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct ChunkHeader {
/// The length of the chunk data in bytes, excluding the header.
length: u32,
/// Chunk type.
ty: ChunkType,
}
impl Header {
fn from_reader<R: io::Read>(mut reader: R) -> Result<Self, Error> {
use self::Error::Io;
let mut magic = [0; 4];
reader.read_exact(&mut magic).map_err(Io)?;
// We only validate the magic here. The caller cares about the version and
// length of the contents, so let them decide what to do with those values.
if &magic == b"glTF" {
Ok(Self {
magic,
version: reader.read_u32::<LittleEndian>().map_err(Io)?,
length: reader.read_u32::<LittleEndian>().map_err(Io)?,
})
} else {
Err(Error::Magic(magic))
}
}
fn size_of() -> usize {
12
}
}
impl ChunkHeader {
fn from_reader<R: io::Read>(mut reader: R) -> Result<Self, Error> {
use self::Error::Io;
let length = reader.read_u32::<LittleEndian>().map_err(Io)?;
let mut ty = [0; 4];
reader.read_exact(&mut ty).map_err(Io)?;
let ty = match &ty {
b"JSON" => Ok(ChunkType::Json),
b"BIN\0" => Ok(ChunkType::Bin),
_ => Err(Error::UnknownChunkType(ty)),
}?;
Ok(Self { length, ty })
}
}
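/// Rounds `n` up to the next multiple of four, e.g. 13 -> 16 and 16 -> 16.
/// GLB requires the JSON and BIN chunks to be 4-byte aligned, so the writer
/// below pads the JSON chunk with spaces and the BIN chunk with zeros.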
fn align_to_multiple_of_four(n: &mut usize) {
*n = (*n + 3) & !3;
}
fn split_binary_gltf(mut data: &[u8]) -> Result<(&[u8], Option<&[u8]>), Error> {
let (json, mut data) = ChunkHeader::from_reader(&mut data)
.and_then(|json_h| {
if let ChunkType::Json = json_h.ty {
Ok(json_h)
} else {
Err(Error::ChunkType(json_h.ty))
}
})
.and_then(|json_h| {
if json_h.length as usize <= data.len() {
Ok(json_h)
} else {
Err(Error::ChunkLength {
ty: json_h.ty,
length: json_h.length,
length_read: data.len(),
})
}
})
// We have verified that json_h.length is no greater than that of
// data.len().
.map(|json_h| data.split_at(json_h.length as usize))?;
let bin = if !data.is_empty() {
ChunkHeader::from_reader(&mut data)
.and_then(|bin_h| {
if let ChunkType::Bin = bin_h.ty {
Ok(bin_h)
} else {
Err(Error::ChunkType(bin_h.ty))
}
})
.and_then(|bin_h| {
if bin_h.length as usize <= data.len() {
Ok(bin_h)
} else {
Err(Error::ChunkLength {
ty: bin_h.ty,
length: bin_h.length,
length_read: data.len(),
})
}
})
// We have verified that bin_h.length is no greater than that
// of data.len().
.map(|bin_h| data.split_at(bin_h.length as usize))
.map(|(x, _)| Some(x))?
} else {
None
};
Ok((json, bin))
}
impl<'a> Glb<'a> {
/// Writes binary glTF to a writer.
pub fn to_writer<W>(&self, mut writer: W) -> Result<(), crate::Error>
where
W: io::Write,
{
// Write GLB header
{
let magic = b"glTF";
let version = 2;
let mut length =
mem::size_of::<Header>() + mem::size_of::<ChunkHeader>() + self.json.len();
align_to_multiple_of_four(&mut length);
if let Some(bin) = self.bin.as_ref() {
length += mem::size_of::<ChunkHeader>() + bin.len();
align_to_multiple_of_four(&mut length);
}
writer.write_all(&magic[..])?;
writer.write_u32::<LittleEndian>(version)?;
writer.write_u32::<LittleEndian>(length as u32)?;
}
// Write JSON chunk header
{
let magic = b"JSON";
let mut length = self.json.len();
align_to_multiple_of_four(&mut length);
let padding = length - self.json.len();
writer.write_u32::<LittleEndian>(length as u32)?;
writer.write_all(&magic[..])?;
writer.write_all(&self.json)?;
for _ in 0..padding {
writer.write_u8(0x20)?;
}
}
if let Some(bin) = self.bin.as_ref() {
let magic = b"BIN\0";
let mut length = bin.len();
align_to_multiple_of_four(&mut length);
let padding = length - bin.len();
writer.write_u32::<LittleEndian>(length as u32)?;
writer.write_all(&magic[..])?;
writer.write_all(bin)?;
for _ in 0..padding {
writer.write_u8(0)?;
}
}
Ok(())
}
/// Writes binary glTF to a byte vector.
pub fn to_vec(&self) -> Result<Vec<u8>, crate::Error> {
let mut length = mem::size_of::<Header>() + mem::size_of::<ChunkHeader>() + self.json.len();
align_to_multiple_of_four(&mut length);
if let Some(bin) = self.bin.as_ref() {
length += mem::size_of::<ChunkHeader>() + bin.len();
align_to_multiple_of_four(&mut length);
}
let mut vec = Vec::with_capacity(length);
self.to_writer(&mut vec as &mut dyn io::Write)?;
Ok(vec)
}
/// Splits loaded GLB into its three chunks.
///
/// * Mandatory GLB header.
/// * Mandatory JSON chunk.
/// * Optional BIN chunk.
pub fn from_slice(mut data: &'a [u8]) -> Result<Self, crate::Error> {
let header = Header::from_reader(&mut data)
.and_then(|header| {
let contents_length = header.length as usize - Header::size_of();
if contents_length <= data.len() {
Ok(header)
} else {
Err(Error::Length {
length: contents_length as u32,
length_read: data.len(),
})
}
})
.map_err(crate::Error::Binary)?;
match header.version {
2 => split_binary_gltf(data)
.map(|(json, bin)| Glb {
header,
json: json.into(),
bin: bin.map(Into::into),
})
.map_err(crate::Error::Binary),
x => Err(crate::Error::Binary(Error::Version(x))),
}
}
/// Reads binary glTF from a generic stream of data.
///
/// # Note
///
/// Reading terminates early if the stream does not contain valid binary
/// glTF.
pub fn from_reader<R: io::Read>(mut reader: R) -> Result<Self, crate::Error> {
let header = Header::from_reader(&mut reader).map_err(crate::Error::Binary)?;
match header.version {
2 => {
let glb_len = header.length - Header::size_of() as u32;
let mut buf = vec![0; glb_len as usize];
if let Err(e) = reader.read_exact(&mut buf).map_err(Error::Io) {
Err(crate::Error::Binary(e))
} else {
split_binary_gltf(&buf)
.map(|(json, bin)| Glb {
header,
json: json.to_vec().into(),
bin: bin.map(<[u8]>::to_vec).map(Into::into),
})
.map_err(crate::Error::Binary)
}
}
x => Err(crate::Error::Binary(Error::Version(x))),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}",
match *self {
Error::Io(ref e) => return e.fmt(f),
Error::Version(_) => "unsupported version",
Error::Magic(_) => "not glTF magic",
Error::Length { .. } => "could not completely read the object",
Error::ChunkLength { ty, .. } => match ty {
ChunkType::Json => "JSON chunk length exceeds that of slice",
ChunkType::Bin => "BIN\\0 chunk length exceeds that of slice",
},
Error::ChunkType(ty) => match ty {
ChunkType::Json => "was not expecting JSON chunk",
ChunkType::Bin => "was not expecting BIN\\0 chunk",
},
Error::UnknownChunkType(_) => "unknown chunk type",
}
)
}
}
impl ::std::error::Error for Error {}
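// Illustrative sketch, not part of the vendored crate: round-tripping a GLB
// with the types above. The input bytes are assumed to hold a valid binary
// glTF asset; the function name is hypothetical.
fn reencode_glb(bytes: &[u8]) -> Result<Vec<u8>, gltf::Error> {
    let glb = gltf::Glb::from_slice(bytes)?;
    println!(
        "GLB v{}: {} bytes of JSON, BIN chunk present: {}",
        glb.header.version,
        glb.json.len(),
        glb.bin.is_some(),
    );
    // `to_vec` re-serializes the header and both chunks with 4-byte padding.
    glb.to_vec()
}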

209
vendor/gltf/src/buffer.rs vendored Normal file
View File

@@ -0,0 +1,209 @@
#[cfg(feature = "import")]
use std::ops;
use crate::Document;
pub use json::buffer::Target;
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
/// A buffer points to binary data representing geometry, animations, or skins.
#[derive(Clone, Debug)]
pub struct Buffer<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::buffer::Buffer,
}
/// A view into a buffer generally representing a subset of the buffer.
#[derive(Clone, Debug)]
pub struct View<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::buffer::View,
/// The parent `Buffer`.
#[allow(dead_code)]
parent: Buffer<'a>,
}
/// Describes a buffer data source.
#[derive(Clone, Debug)]
pub enum Source<'a> {
/// Buffer data is contained in the `BIN` section of binary glTF.
Bin,
/// Buffer data is contained in an external data source.
Uri(&'a str),
}
/// Buffer data belonging to an imported glTF asset.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
#[derive(Clone, Debug)]
pub struct Data(pub Vec<u8>);
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
impl ops::Deref for Data {
type Target = [u8];
fn deref(&self) -> &Self::Target {
self.0.as_slice()
}
}
impl<'a> Buffer<'a> {
/// Constructs a `Buffer`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::buffer::Buffer,
) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns the buffer data source.
pub fn source(&self) -> Source<'a> {
if let Some(uri) = self.json.uri.as_deref() {
Source::Uri(uri)
} else {
Source::Bin
}
}
/// The length of the buffer in bytes.
pub fn length(&self) -> usize {
self.json.byte_length.0 as usize
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
impl<'a> View<'a> {
/// Constructs a `View`.
pub(crate) fn new(document: &'a Document, index: usize, json: &'a json::buffer::View) -> Self {
let parent = document.buffers().nth(json.buffer.value()).unwrap();
Self {
document,
index,
json,
parent,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns the parent `Buffer`.
pub fn buffer(&self) -> Buffer<'a> {
self.document
.buffers()
.nth(self.json.buffer.value())
.unwrap()
}
/// Returns the length of the buffer view in bytes.
pub fn length(&self) -> usize {
self.json.byte_length.0 as usize
}
/// Returns the offset into the parent buffer in bytes.
pub fn offset(&self) -> usize {
self.json.byte_offset.unwrap_or_default().0 as usize
}
/// Returns the stride in bytes between vertex attributes or other interleavable
/// data. When `None`, data is assumed to be tightly packed.
pub fn stride(&self) -> Option<usize> {
self.json.byte_stride.and_then(|x| {
// Treat byte_stride == 0 the same as an unspecified stride.
// This is technically a validation error, but handling it here is the
// best we can do.
if x.0 == 0 {
None
} else {
Some(x.0)
}
})
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Optional target the buffer should be bound to.
pub fn target(&self) -> Option<Target> {
self.json.target.map(|target| target.unwrap())
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
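// Illustrative sketch, not part of the vendored crate: inspecting buffers and
// buffer views through the accessors above. Assumes a `gltf::Document` loaded
// elsewhere; the function name is hypothetical.
fn print_buffer_layout(document: &gltf::Document) {
    for buffer in document.buffers() {
        match buffer.source() {
            gltf::buffer::Source::Bin => println!("buffer {}: BIN chunk", buffer.index()),
            gltf::buffer::Source::Uri(uri) => println!("buffer {}: {}", buffer.index(), uri),
        }
    }
    for view in document.views() {
        // Offset and length locate the view inside its parent buffer; stride
        // is `None` when the data is tightly packed.
        println!(
            "view {} -> buffer {}: offset {}, length {}, stride {:?}",
            view.index(),
            view.buffer().index(),
            view.offset(),
            view.length(),
            view.stride(),
        );
    }
}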

208
vendor/gltf/src/camera.rs vendored Normal file
View File

@@ -0,0 +1,208 @@
use crate::Document;
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
/// A camera's projection.
#[derive(Clone, Debug)]
pub enum Projection<'a> {
/// Describes an orthographic projection.
Orthographic(Orthographic<'a>),
/// Describes a perspective projection.
Perspective(Perspective<'a>),
}
/// A camera's projection. A node can reference a camera to apply a transform to
/// place the camera in the scene.
#[derive(Clone, Debug)]
pub struct Camera<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::camera::Camera,
}
/// Values for an orthographic camera projection.
#[derive(Clone, Debug)]
pub struct Orthographic<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::camera::Orthographic,
}
/// Values for a perspective camera projection.
#[derive(Clone, Debug)]
pub struct Perspective<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::camera::Perspective,
}
impl<'a> Camera<'a> {
/// Constructs a `Camera`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::camera::Camera,
) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns the camera's projection.
pub fn projection(&self) -> Projection {
match self.json.type_.unwrap() {
json::camera::Type::Orthographic => {
let json = self.json.orthographic.as_ref().unwrap();
Projection::Orthographic(Orthographic::new(self.document, json))
}
json::camera::Type::Perspective => {
let json = self.json.perspective.as_ref().unwrap();
Projection::Perspective(Perspective::new(self.document, json))
}
}
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
impl<'a> Orthographic<'a> {
/// Constructs an `Orthographic` camera projection.
pub(crate) fn new(document: &'a Document, json: &'a json::camera::Orthographic) -> Self {
Self { document, json }
}
/// The horizontal magnification of the view.
pub fn xmag(&self) -> f32 {
self.json.xmag
}
/// The vertical magnification of the view.
pub fn ymag(&self) -> f32 {
self.json.ymag
}
/// The distance to the far clipping plane.
pub fn zfar(&self) -> f32 {
self.json.zfar
}
/// The distance to the near clipping plane.
pub fn znear(&self) -> f32 {
self.json.znear
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
impl<'a> Perspective<'a> {
/// Constructs a `Perspective` camera projection.
pub(crate) fn new(document: &'a Document, json: &'a json::camera::Perspective) -> Self {
Self { document, json }
}
/// Aspect ratio of the field of view.
pub fn aspect_ratio(&self) -> Option<f32> {
self.json.aspect_ratio
}
/// The vertical field of view in radians.
pub fn yfov(&self) -> f32 {
self.json.yfov
}
/// The distance to the far clipping plane.
pub fn zfar(&self) -> Option<f32> {
self.json.zfar
}
/// The distance to the near clipping plane.
pub fn znear(&self) -> f32 {
self.json.znear
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
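// Illustrative sketch, not part of the vendored crate: reading projection
// parameters with the wrappers above. Assumes a `gltf::Document` loaded
// elsewhere; the function name is hypothetical.
fn print_cameras(document: &gltf::Document) {
    for camera in document.cameras() {
        match camera.projection() {
            gltf::camera::Projection::Perspective(p) => println!(
                "camera {}: perspective, yfov {} rad, znear {}",
                camera.index(),
                p.yfov(),
                p.znear(),
            ),
            gltf::camera::Projection::Orthographic(o) => println!(
                "camera {}: orthographic, xmag {}, ymag {}",
                camera.index(),
                o.xmag(),
                o.ymag(),
            ),
        }
    }
}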

184
vendor/gltf/src/image.rs vendored Normal file
View File

@@ -0,0 +1,184 @@
#[allow(unused)]
use crate::{buffer, Document, Error, Result};
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
use image_crate::DynamicImage;
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
/// Format of image pixel data.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Format {
/// Red only.
R8,
/// Red, green.
R8G8,
/// Red, green, blue.
R8G8B8,
/// Red, green, blue, alpha.
R8G8B8A8,
/// Red only (16 bits).
R16,
/// Red, green (16 bits).
R16G16,
/// Red, green, blue (16 bits).
R16G16B16,
/// Red, green, blue, alpha (16 bits).
R16G16B16A16,
/// Red, green, blue (32 bits float)
R32G32B32FLOAT,
/// Red, green, blue, alpha (32 bits float)
R32G32B32A32FLOAT,
}
/// Describes an image data source.
#[derive(Clone, Debug)]
pub enum Source<'a> {
/// Image data is contained in a buffer view.
View {
/// The buffer view containing the encoded image data.
view: buffer::View<'a>,
/// The image data MIME type.
mime_type: &'a str,
},
/// Image data is contained in an external data source.
Uri {
/// The URI of the external data source.
uri: &'a str,
/// The image data MIME type, if provided.
mime_type: Option<&'a str>,
},
}
/// Image data used to create a texture.
#[derive(Clone, Debug)]
pub struct Image<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::image::Image,
}
/// Image data belonging to an imported glTF asset.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
#[derive(Clone, Debug)]
pub struct Data {
/// The image pixel data (8 bits per channel).
pub pixels: Vec<u8>,
/// The image pixel data format.
pub format: Format,
/// The image width in pixels.
pub width: u32,
/// The image height in pixels.
pub height: u32,
}
impl<'a> Image<'a> {
/// Constructs an `Image`.
pub(crate) fn new(document: &'a Document, index: usize, json: &'a json::image::Image) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns the image data source.
pub fn source(&self) -> Source<'a> {
if let Some(index) = self.json.buffer_view.as_ref() {
let view = self.document.views().nth(index.value()).unwrap();
let mime_type = self.json.mime_type.as_ref().map(|x| x.0.as_str()).unwrap();
Source::View { view, mime_type }
} else {
let uri = self.json.uri.as_ref().unwrap();
let mime_type = self.json.mime_type.as_ref().map(|x| x.0.as_str());
Source::Uri { uri, mime_type }
}
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
#[cfg(feature = "import")]
impl Data {
/// Note: We don't implement `From<DynamicImage>` since we don't want
/// to expose such functionality to the user.
pub(crate) fn new(image: DynamicImage) -> Result<Self> {
use image_crate::GenericImageView;
let format = match image {
DynamicImage::ImageLuma8(_) => Format::R8,
DynamicImage::ImageLumaA8(_) => Format::R8G8,
DynamicImage::ImageRgb8(_) => Format::R8G8B8,
DynamicImage::ImageRgba8(_) => Format::R8G8B8A8,
DynamicImage::ImageLuma16(_) => Format::R16,
DynamicImage::ImageLumaA16(_) => Format::R16G16,
DynamicImage::ImageRgb16(_) => Format::R16G16B16,
DynamicImage::ImageRgba16(_) => Format::R16G16B16A16,
DynamicImage::ImageRgb32F(_) => Format::R32G32B32FLOAT,
DynamicImage::ImageRgba32F(_) => Format::R32G32B32A32FLOAT,
image => return Err(Error::UnsupportedImageFormat(image)),
};
let (width, height) = image.dimensions();
let pixels = image.into_bytes();
Ok(Data {
format,
width,
height,
pixels,
})
}
}
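// Illustrative sketch, not part of the vendored crate: resolving where each
// image's bytes live using the `Source` enum above. Assumes a `gltf::Document`
// loaded elsewhere; the function name is hypothetical.
fn print_image_sources(document: &gltf::Document) {
    for image in document.images() {
        match image.source() {
            gltf::image::Source::Uri { uri, mime_type } => {
                println!("image {}: uri {} ({:?})", image.index(), uri, mime_type);
            }
            gltf::image::Source::View { view, mime_type } => {
                println!(
                    "image {}: {} bytes embedded in buffer view {} ({})",
                    image.index(),
                    view.length(),
                    view.index(),
                    mime_type,
                );
            }
        }
    }
}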

316
vendor/gltf/src/import.rs vendored Normal file
View File

@@ -0,0 +1,316 @@
use crate::buffer;
use crate::image;
use std::borrow::Cow;
use std::{fs, io};
use crate::{Document, Error, Gltf, Result};
use image_crate::ImageFormat::{Jpeg, Png};
use std::path::Path;
/// Return type of `import`.
type Import = (Document, Vec<buffer::Data>, Vec<image::Data>);
/// Represents the set of URI schemes the importer supports.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
enum Scheme<'a> {
/// `data:[<media type>];base64,<data>`.
Data(Option<&'a str>, &'a str),
/// `file:[//]<absolute file path>`.
///
/// Note: The file scheme does not implement authority.
File(&'a str),
/// `../foo`, etc.
Relative(Cow<'a, str>),
/// Placeholder for an unsupported URI scheme identifier.
Unsupported,
}
impl<'a> Scheme<'a> {
fn parse(uri: &str) -> Scheme<'_> {
if uri.contains(':') {
if let Some(rest) = uri.strip_prefix("data:") {
let mut it = rest.split(";base64,");
match (it.next(), it.next()) {
(match0_opt, Some(match1)) => Scheme::Data(match0_opt, match1),
(Some(match0), _) => Scheme::Data(None, match0),
_ => Scheme::Unsupported,
}
} else if let Some(rest) = uri.strip_prefix("file://") {
Scheme::File(rest)
} else if let Some(rest) = uri.strip_prefix("file:") {
Scheme::File(rest)
} else {
Scheme::Unsupported
}
} else {
Scheme::Relative(urlencoding::decode(uri).unwrap())
}
}
fn read(base: Option<&Path>, uri: &str) -> Result<Vec<u8>> {
match Scheme::parse(uri) {
// The path may be unused in the Scheme::Data case
// Example: "uri" : "data:application/octet-stream;base64,wsVHPgA...."
Scheme::Data(_, base64) => base64::decode(base64).map_err(Error::Base64),
Scheme::File(path) if base.is_some() => read_to_end(path),
Scheme::Relative(path) if base.is_some() => read_to_end(base.unwrap().join(&*path)),
Scheme::Unsupported => Err(Error::UnsupportedScheme),
_ => Err(Error::ExternalReferenceInSliceImport),
}
}
}
fn read_to_end<P>(path: P) -> Result<Vec<u8>>
where
P: AsRef<Path>,
{
use io::Read;
let file = fs::File::open(path.as_ref()).map_err(Error::Io)?;
// Allocate one extra byte so the buffer doesn't need to grow before the
// final `read` call at the end of the file. Don't worry about `usize`
// overflow because reading will fail regardless in that case.
let length = file.metadata().map(|x| x.len() + 1).unwrap_or(0);
let mut reader = io::BufReader::new(file);
let mut data = Vec::with_capacity(length as usize);
reader.read_to_end(&mut data).map_err(Error::Io)?;
Ok(data)
}
impl buffer::Data {
/// Construct a buffer data object by reading the given source.
/// If `base` is provided, then external filesystem references will
/// be resolved from this directory.
pub fn from_source(source: buffer::Source<'_>, base: Option<&Path>) -> Result<Self> {
Self::from_source_and_blob(source, base, &mut None)
}
/// Construct a buffer data object by reading the given source.
/// If `base` is provided, then external filesystem references will
/// be resolved from this directory.
/// `blob` represents the `BIN` section of a binary glTF file,
/// and it will be taken to fill the buffer if the `source` refers to it.
pub fn from_source_and_blob(
source: buffer::Source<'_>,
base: Option<&Path>,
blob: &mut Option<Vec<u8>>,
) -> Result<Self> {
let mut data = match source {
buffer::Source::Uri(uri) => Scheme::read(base, uri),
buffer::Source::Bin => blob.take().ok_or(Error::MissingBlob),
}?;
while data.len() % 4 != 0 {
data.push(0);
}
Ok(buffer::Data(data))
}
}
/// Import buffer data referenced by a glTF document.
///
/// ### Note
///
/// This function is intended for advanced users who wish to forego loading image data.
/// A typical user should call [`import`] instead.
pub fn import_buffers(
document: &Document,
base: Option<&Path>,
mut blob: Option<Vec<u8>>,
) -> Result<Vec<buffer::Data>> {
let mut buffers = Vec::new();
for buffer in document.buffers() {
let data = buffer::Data::from_source_and_blob(buffer.source(), base, &mut blob)?;
if data.len() < buffer.length() {
return Err(Error::BufferLength {
buffer: buffer.index(),
expected: buffer.length(),
actual: data.len(),
});
}
buffers.push(data);
}
Ok(buffers)
}
impl image::Data {
/// Construct an image data object by reading the given source.
/// If `base` is provided, then external filesystem references will
/// be resolved from this directory.
pub fn from_source(
source: image::Source<'_>,
base: Option<&Path>,
buffer_data: &[buffer::Data],
) -> Result<Self> {
#[cfg(feature = "guess_mime_type")]
let guess_format = |encoded_image: &[u8]| match image_crate::guess_format(encoded_image) {
Ok(image_crate::ImageFormat::Png) => Some(Png),
Ok(image_crate::ImageFormat::Jpeg) => Some(Jpeg),
_ => None,
};
#[cfg(not(feature = "guess_mime_type"))]
let guess_format = |_encoded_image: &[u8]| None;
let decoded_image = match source {
image::Source::Uri { uri, mime_type } if base.is_some() => match Scheme::parse(uri) {
Scheme::Data(Some(annoying_case), base64) => {
let encoded_image = base64::decode(base64).map_err(Error::Base64)?;
let encoded_format = match annoying_case {
"image/png" => Png,
"image/jpeg" => Jpeg,
_ => match guess_format(&encoded_image) {
Some(format) => format,
None => return Err(Error::UnsupportedImageEncoding),
},
};
image_crate::load_from_memory_with_format(&encoded_image, encoded_format)?
}
Scheme::Unsupported => return Err(Error::UnsupportedScheme),
_ => {
let encoded_image = Scheme::read(base, uri)?;
let encoded_format = match mime_type {
Some("image/png") => Png,
Some("image/jpeg") => Jpeg,
Some(_) => match guess_format(&encoded_image) {
Some(format) => format,
None => return Err(Error::UnsupportedImageEncoding),
},
None => match uri.rsplit('.').next() {
Some("png") => Png,
Some("jpg") | Some("jpeg") => Jpeg,
_ => match guess_format(&encoded_image) {
Some(format) => format,
None => return Err(Error::UnsupportedImageEncoding),
},
},
};
image_crate::load_from_memory_with_format(&encoded_image, encoded_format)?
}
},
image::Source::View { view, mime_type } => {
let parent_buffer_data = &buffer_data[view.buffer().index()].0;
let begin = view.offset();
let end = begin + view.length();
let encoded_image = &parent_buffer_data[begin..end];
let encoded_format = match mime_type {
"image/png" => Png,
"image/jpeg" => Jpeg,
_ => match guess_format(encoded_image) {
Some(format) => format,
None => return Err(Error::UnsupportedImageEncoding),
},
};
image_crate::load_from_memory_with_format(encoded_image, encoded_format)?
}
_ => return Err(Error::ExternalReferenceInSliceImport),
};
image::Data::new(decoded_image)
}
}
/// Import image data referenced by a glTF document.
///
/// ### Note
///
/// This function is intended for advanced users who wish to forego loading buffer data.
/// A typical user should call [`import`] instead.
pub fn import_images(
document: &Document,
base: Option<&Path>,
buffer_data: &[buffer::Data],
) -> Result<Vec<image::Data>> {
let mut images = Vec::new();
for image in document.images() {
images.push(image::Data::from_source(image.source(), base, buffer_data)?);
}
Ok(images)
}
fn import_impl(Gltf { document, blob }: Gltf, base: Option<&Path>) -> Result<Import> {
let buffer_data = import_buffers(&document, base, blob)?;
let image_data = import_images(&document, base, &buffer_data)?;
let import = (document, buffer_data, image_data);
Ok(import)
}
fn import_path(path: &Path) -> Result<Import> {
let base = path.parent().unwrap_or_else(|| Path::new("./"));
let file = fs::File::open(path).map_err(Error::Io)?;
let reader = io::BufReader::new(file);
import_impl(Gltf::from_reader(reader)?, Some(base))
}
/// Import glTF 2.0 from the file system.
///
/// ```
/// # fn run() -> Result<(), gltf::Error> {
/// # let path = "examples/Box.gltf";
/// # #[allow(unused)]
/// let (document, buffers, images) = gltf::import(path)?;
/// # Ok(())
/// # }
/// # fn main() {
/// # run().expect("test failure");
/// # }
/// ```
///
/// ### Note
///
/// This function is provided as a convenience for loading glTF and associated
/// resources from the file system. It is suitable for real-world use but may
/// not be suitable for all real-world use cases. More complex import scenarios,
/// such as downloading from web URLs, are not handled by this function. These
/// scenarios are delegated to the user.
///
/// You can read glTF without loading resources by constructing the [`Gltf`]
/// (standard glTF) or [`Glb`] (binary glTF) data structures explicitly.
///
/// [`Gltf`]: struct.Gltf.html
/// [`Glb`]: struct.Glb.html
pub fn import<P>(path: P) -> Result<Import>
where
P: AsRef<Path>,
{
import_path(path.as_ref())
}
fn import_slice_impl(slice: &[u8]) -> Result<Import> {
import_impl(Gltf::from_slice(slice)?, None)
}
/// Import glTF 2.0 from a slice.
///
/// File paths in the document are assumed to be relative to the current working
/// directory.
///
/// ### Note
///
/// This function is intended for advanced users.
/// A typical user should call [`import`] instead.
///
/// ```
/// # extern crate gltf;
/// # use std::fs;
/// # use std::io::Read;
/// # fn run() -> Result<(), gltf::Error> {
/// # let path = "examples/Box.glb";
/// # let mut file = fs::File::open(path).map_err(gltf::Error::Io)?;
/// # let mut bytes = Vec::new();
/// # file.read_to_end(&mut bytes).map_err(gltf::Error::Io)?;
/// # #[allow(unused)]
/// let (document, buffers, images) = gltf::import_slice(bytes.as_slice())?;
/// # Ok(())
/// # }
/// # fn main() {
/// # run().expect("test failure");
/// # }
/// ```
pub fn import_slice<S>(slice: S) -> Result<Import>
where
S: AsRef<[u8]>,
{
import_slice_impl(slice.as_ref())
}
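// Illustrative sketch, not part of the vendored crate: the manual equivalent
// of `import`, spelled out with `Gltf::from_reader` and the helpers above.
// The function name is hypothetical; error handling mirrors `import_path`.
fn import_manually(path: &Path) -> Result<Import> {
    let base = path.parent().unwrap_or_else(|| Path::new("./"));
    let file = fs::File::open(path).map_err(Error::Io)?;
    let Gltf { document, blob } = Gltf::from_reader(io::BufReader::new(file))?;
    let buffers = import_buffers(&document, Some(base), blob)?;
    let images = import_images(&document, Some(base), &buffers)?;
    Ok((document, buffers, images))
}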

626
vendor/gltf/src/iter.rs vendored Normal file
View File

@@ -0,0 +1,626 @@
use std::{iter, slice};
use crate::accessor::Accessor;
use crate::animation::Animation;
use crate::buffer::{Buffer, View};
use crate::camera::Camera;
use crate::image::Image;
use crate::material::Material;
use crate::mesh::Mesh;
use crate::scene::{Node, Scene};
use crate::skin::Skin;
use crate::texture::{Sampler, Texture};
use crate::Document;
/// An `Iterator` that visits extension strings used by a glTF asset.
#[derive(Clone, Debug)]
pub struct ExtensionsUsed<'a>(pub(crate) slice::Iter<'a, String>);
/// An `Iterator` that visits extension strings required by a glTF asset.
#[derive(Clone, Debug)]
pub struct ExtensionsRequired<'a>(pub(crate) slice::Iter<'a, String>);
/// An `Iterator` that visits every accessor in a glTF asset.
#[derive(Clone, Debug)]
pub struct Accessors<'a> {
/// Internal accessor iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::accessor::Accessor>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every animation in a glTF asset.
#[derive(Clone, Debug)]
pub struct Animations<'a> {
/// Internal animation iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::animation::Animation>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every buffer in a glTF asset.
#[derive(Clone, Debug)]
pub struct Buffers<'a> {
/// Internal buffer iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::buffer::Buffer>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every buffer view in a glTF asset.
#[derive(Clone, Debug)]
pub struct Views<'a> {
/// Internal buffer view iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::buffer::View>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every camera in a glTF asset.
#[derive(Clone, Debug)]
pub struct Cameras<'a> {
/// Internal camera iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::camera::Camera>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every pre-loaded image in a glTF asset.
#[derive(Clone, Debug)]
pub struct Images<'a> {
/// Internal image iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::image::Image>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every light in a glTF asset.
#[cfg(feature = "KHR_lights_punctual")]
#[derive(Clone, Debug)]
pub struct Lights<'a> {
/// Internal light iterator.
pub(crate) iter:
iter::Enumerate<slice::Iter<'a, json::extensions::scene::khr_lights_punctual::Light>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every variant in a glTF asset.
#[cfg(feature = "KHR_materials_variants")]
#[derive(Clone, Debug)]
pub struct Variants<'a> {
/// Internal variant iterator.
pub(crate) iter:
iter::Enumerate<slice::Iter<'a, json::extensions::scene::khr_materials_variants::Variant>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every material in a glTF asset.
#[derive(Clone, Debug)]
pub struct Materials<'a> {
/// Internal material iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::material::Material>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every mesh in a glTF asset.
#[derive(Clone, Debug)]
pub struct Meshes<'a> {
/// Internal mesh iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::mesh::Mesh>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every node in a glTF asset.
#[derive(Clone, Debug)]
pub struct Nodes<'a> {
/// Internal node iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::scene::Node>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every sampler in a glTF asset.
#[derive(Clone, Debug)]
pub struct Samplers<'a> {
/// Internal sampler iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::texture::Sampler>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every scene in a glTF asset.
#[derive(Clone, Debug)]
pub struct Scenes<'a> {
/// Internal scene iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::scene::Scene>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every skin in a glTF asset.
#[derive(Clone, Debug)]
pub struct Skins<'a> {
/// Internal skin iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::skin::Skin>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
/// An `Iterator` that visits every texture in a glTF asset.
#[derive(Clone, Debug)]
pub struct Textures<'a> {
/// Internal texture iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::texture::Texture>>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
impl<'a> ExactSizeIterator for Accessors<'a> {}
impl<'a> Iterator for Accessors<'a> {
type Item = Accessor<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Accessor::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Accessor::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Accessor::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Animations<'a> {}
impl<'a> Iterator for Animations<'a> {
type Item = Animation<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Animation::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Animation::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Animation::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Buffers<'a> {}
impl<'a> Iterator for Buffers<'a> {
type Item = Buffer<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Buffer::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Buffer::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Buffer::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for ExtensionsUsed<'a> {}
impl<'a> Iterator for ExtensionsUsed<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<Self::Item> {
self.0.next().map(String::as_str)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
fn count(self) -> usize {
self.0.count()
}
fn last(self) -> Option<Self::Item> {
self.0.last().map(String::as_str)
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.0.nth(n).map(String::as_str)
}
}
impl<'a> ExactSizeIterator for ExtensionsRequired<'a> {}
impl<'a> Iterator for ExtensionsRequired<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<Self::Item> {
self.0.next().map(String::as_str)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
fn count(self) -> usize {
self.0.count()
}
fn last(self) -> Option<Self::Item> {
self.0.last().map(String::as_str)
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.0.nth(n).map(String::as_str)
}
}
impl<'a> ExactSizeIterator for Views<'a> {}
impl<'a> Iterator for Views<'a> {
type Item = View<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| View::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| View::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| View::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Cameras<'a> {}
impl<'a> Iterator for Cameras<'a> {
type Item = Camera<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Camera::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Camera::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Camera::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Images<'a> {}
impl<'a> Iterator for Images<'a> {
type Item = Image<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Image::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Image::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Image::new(self.document, index, json))
}
}
#[cfg(feature = "KHR_lights_punctual")]
impl<'a> ExactSizeIterator for Lights<'a> {}
#[cfg(feature = "KHR_lights_punctual")]
impl<'a> Iterator for Lights<'a> {
type Item = crate::khr_lights_punctual::Light<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| crate::khr_lights_punctual::Light::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| crate::khr_lights_punctual::Light::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| crate::khr_lights_punctual::Light::new(self.document, index, json))
}
}
#[cfg(feature = "KHR_materials_variants")]
impl<'a> ExactSizeIterator for Variants<'a> {}
#[cfg(feature = "KHR_materials_variants")]
impl<'a> Iterator for Variants<'a> {
type Item = crate::khr_materials_variants::Variant<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map(|(index, json)| {
crate::khr_materials_variants::Variant::new(self.document, index, json)
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| crate::khr_materials_variants::Variant::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n).map(|(index, json)| {
crate::khr_materials_variants::Variant::new(self.document, index, json)
})
}
}
impl<'a> ExactSizeIterator for Materials<'a> {}
impl<'a> Iterator for Materials<'a> {
type Item = Material<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Material::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Material::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Material::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Meshes<'a> {}
impl<'a> Iterator for Meshes<'a> {
type Item = Mesh<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Mesh::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Mesh::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Mesh::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Nodes<'a> {}
impl<'a> Iterator for Nodes<'a> {
type Item = Node<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Node::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Node::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Node::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Samplers<'a> {}
impl<'a> Iterator for Samplers<'a> {
type Item = Sampler<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Sampler::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Sampler::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Sampler::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Scenes<'a> {}
impl<'a> Iterator for Scenes<'a> {
type Item = Scene<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Scene::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Scene::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Scene::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Skins<'a> {}
impl<'a> Iterator for Skins<'a> {
type Item = Skin<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Skin::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Skin::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Skin::new(self.document, index, json))
}
}
impl<'a> ExactSizeIterator for Textures<'a> {}
impl<'a> Iterator for Textures<'a> {
type Item = Texture<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Texture::new(self.document, index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|(index, json)| Texture::new(document, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Texture::new(self.document, index, json))
}
}
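Each collection iterator above wraps an `Enumerate<slice::Iter>`, so the `ExactSizeIterator` impls make `len()` O(1) and the specialised `nth`/`last` delegate straight to the inner slice iterator. A minimal downstream sketch (editor's illustration, reusing the `examples/Box.gltf` asset mentioned in the crate-level docs; the function name is hypothetical):

```rust
use gltf::Gltf;

fn print_counts() -> Result<(), Box<dyn std::error::Error>> {
    let gltf = Gltf::open("examples/Box.gltf")?;
    // `len()` comes from the ExactSizeIterator impls defined above.
    println!("{} meshes, {} nodes", gltf.meshes().len(), gltf.nodes().len());
    // `last()` is specialised above to avoid walking every element by hand.
    if let Some(node) = gltf.nodes().last() {
        println!("last node index: {}", node.index());
    }
    Ok(())
}
```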

119
vendor/gltf/src/khr_lights_punctual.rs vendored Normal file

@@ -0,0 +1,119 @@
use crate::Document;
use gltf_json::Extras;
/// A light in the scene.
pub struct Light<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::extensions::scene::khr_lights_punctual::Light,
}
impl<'a> Light<'a> {
/// Constructs a `Light`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::extensions::scene::khr_lights_punctual::Light,
) -> Self {
Self {
document,
index,
json,
}
}
/// Color of the light source.
pub fn color(&self) -> [f32; 3] {
self.json.color
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Optional application specific data.
pub fn extras(&self) -> &'a Extras {
&self.json.extras
}
/// Intensity of the light source. `point` and `spot` lights use luminous intensity
/// in candela (lm/sr) while `directional` lights use illuminance in lux (lm/m^2).
pub fn intensity(&self) -> f32 {
self.json.intensity
}
/// A distance cutoff at which the light's intensity may be considered to have reached
/// zero.
pub fn range(&self) -> Option<f32> {
self.json.range
}
/// Specifies the light subcategory.
pub fn kind(&self) -> Kind {
use json::extensions::scene::khr_lights_punctual::Type;
match self.json.type_.unwrap() {
Type::Directional => Kind::Directional,
Type::Point => Kind::Point,
Type::Spot => {
let args = self.json.spot.as_ref().unwrap();
Kind::Spot {
inner_cone_angle: args.inner_cone_angle,
outer_cone_angle: args.outer_cone_angle,
}
}
}
}
}
/// Light subcategory.
pub enum Kind {
/// Directional lights are light sources that act as though they are infinitely far away
/// and emit light in the direction of the local -z axis. This light type inherits the
/// orientation of the node that it belongs to; position and scale are ignored except for
/// their effect on the inherited node orientation. Because it is at an infinite distance,
/// the light is not attenuated. Its intensity is defined in lumens per metre squared, or
/// lux (lm/m2).
Directional,
/// Point lights emit light in all directions from their position in space; rotation and
/// scale are ignored except for their effect on the inherited node position. The
/// brightness of the light attenuates in a physically correct manner as distance
/// increases from the light's position (i.e. brightness goes like the inverse square of
/// the distance). Point light intensity is defined in candela, which is lumens per square
/// radian (lm/sr).
Point,
/// Spot lights emit light in a cone in the direction of the local -z axis. The angle and
/// falloff of the cone are defined using two numbers, the `inner_cone_angle` and
/// `outer_cone_angle`. As with point lights, the brightness also attenuates in a
/// physically correct manner as distance increases from the light's position (i.e.
/// brightness goes like the inverse square of the distance). Spot light intensity refers
/// to the brightness inside the `inner_cone_angle` (and at the location of the light) and
/// is defined in candela, which is lumens per square radian (lm/sr). Engines that don't
/// support two angles for spotlights should use `outer_cone_angle` as the spotlight angle
/// (leaving `inner_cone_angle` to implicitly be 0).
///
/// A spot light's position and orientation are inherited from its node transform.
/// Inherited scale does not affect cone shape, and is ignored except for its effect on
/// position and orientation.
Spot {
/// Angle in radians from centre of spotlight where falloff begins.
inner_cone_angle: f32,
/// Angle in radians from centre of spotlight where falloff ends.
outer_cone_angle: f32,
},
}
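A short sketch of consuming this wrapper from a downstream crate, assuming the `KHR_lights_punctual` feature is enabled (`Document::lights` is defined in `lib.rs` below; the function name is hypothetical):

```rust
use gltf::khr_lights_punctual::Kind;

fn describe_lights(document: &gltf::Document) {
    // `lights()` returns `None` when the asset does not use the extension.
    if let Some(lights) = document.lights() {
        for light in lights {
            match light.kind() {
                Kind::Directional => {
                    println!("light #{}: directional, {} lux", light.index(), light.intensity());
                }
                Kind::Point => {
                    println!("light #{}: point, {} cd", light.index(), light.intensity());
                }
                Kind::Spot { inner_cone_angle, outer_cone_angle } => {
                    println!(
                        "light #{}: spot, cone {:.2}..{:.2} rad",
                        light.index(),
                        inner_cone_angle,
                        outer_cone_angle
                    );
                }
            }
        }
    }
}
```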

64
vendor/gltf/src/khr_materials_variants.rs vendored Normal file

@@ -0,0 +1,64 @@
use crate::{Document, Material};
/// A material variant, as defined by the `KHR_materials_variants` extension.
pub struct Variant<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON index.
#[allow(dead_code)]
index: usize,
/// The corresponding JSON struct.
json: &'a json::extensions::scene::khr_materials_variants::Variant,
}
impl<'a> Variant<'a> {
/// Constructs a `Variant`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::extensions::scene::khr_materials_variants::Variant,
) -> Self {
Self {
document,
index,
json,
}
}
/// Name of the variant.
pub fn name(&self) -> &'a str {
&self.json.name
}
}
/// A mapping between a material and the variants that use it.
pub struct Mapping<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::extensions::mesh::Mapping,
}
impl<'a> Mapping<'a> {
/// Constructs a `Mapping`.
pub(crate) fn new(document: &'a Document, json: &'a json::extensions::mesh::Mapping) -> Self {
Self { document, json }
}
/// Get the variant indices that use this material.
pub fn variants(&self) -> &'a [u32] {
&self.json.variants
}
/// Get the corresponding material.
pub fn material(&self) -> Material<'a> {
self.document
.materials()
.nth(self.json.material as usize)
.unwrap_or_else(|| Material::default(self.document))
}
}
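Similarly, a minimal sketch of listing the declared variants, assuming the `KHR_materials_variants` feature is enabled (`Document::variants` is defined in `lib.rs` below; the function name is hypothetical):

```rust
fn list_variants(document: &gltf::Document) {
    match document.variants() {
        Some(variants) => {
            for variant in variants {
                println!("material variant: {}", variant.name());
            }
        }
        None => println!("asset declares no KHR_materials_variants data"),
    }
}
```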

842
vendor/gltf/src/lib.rs vendored Normal file

@@ -0,0 +1,842 @@
#![deny(missing_docs)]
#![allow(unknown_lints)]
#![cfg_attr(docsrs, feature(doc_cfg))]
//! glTF 2.0 loader
//!
//! This crate is intended to load [glTF 2.0], a file format designed for the
//! efficient runtime transmission of 3D scenes. The crate aims to provide
//! rustic utilities that make working with glTF simple and intuitive.
//!
//! # Installation
//!
//! Add `gltf` to your `Cargo.toml`:
//!
//! ```toml
//! [dependencies.gltf]
//! version = "1"
//! ```
//!
//! # Examples
//!
//! ## Basic usage
//!
//! Walking the node hierarchy.
//!
//! ```
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! # use gltf::Gltf;
//! let gltf = Gltf::open("examples/Box.gltf")?;
//! for scene in gltf.scenes() {
//! for node in scene.nodes() {
//! println!(
//! "Node #{} has {} children",
//! node.index(),
//! node.children().count(),
//! );
//! }
//! }
//! # Ok(())
//! # }
//! # fn main() {
//! # let _ = run().expect("runtime error");
//! # }
//! ```
//!
//! ## Import function
//!
//! Reading a glTF document plus its buffers and images from the
//! file system.
//!
//! ```
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! let (document, buffers, images) = gltf::import("examples/Box.gltf")?;
//! assert_eq!(buffers.len(), document.buffers().count());
//! assert_eq!(images.len(), document.images().count());
//! # Ok(())
//! # }
//! # fn main() {
//! # let _ = run().expect("runtime error");
//! # }
//! ```
//!
//! ### Note
//!
//! This function is provided as a convenience for loading glTF and associated
//! resources from the file system. It is suitable for real-world use but does
//! not cover every scenario: more complex imports, such as downloading from web
//! URLs, are not handled by this function and are left to the user.
//!
//! You can read glTF without loading resources by constructing the [`Gltf`]
//! (standard glTF) or [`Glb`] (binary glTF) data structures explicitly. Buffer
//! and image data can then be imported separately using [`import_buffers`] and
//! [`import_images`] respectively.
//!
//! [glTF 2.0]: https://www.khronos.org/gltf
//! [`Gltf`]: struct.Gltf.html
//! [`Glb`]: struct.Glb.html
//! [`Node`]: struct.Node.html
//! [`Scene`]: struct.Scene.html
#[cfg(test)]
#[macro_use]
extern crate approx;
#[cfg(feature = "import")]
extern crate image as image_crate;
#[macro_use]
extern crate lazy_static;
/// Contains (de)serializable data structures that match the glTF JSON text.
pub extern crate gltf_json as json;
/// Accessors for reading vertex attributes from buffer views.
pub mod accessor;
/// Animations, their channels, targets, and samplers.
pub mod animation;
/// Primitives for working with binary glTF.
pub mod binary;
/// Buffers and buffer views.
pub mod buffer;
/// Cameras and their projections.
pub mod camera;
/// Images that may be used by textures.
pub mod image;
/// The reference importer.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
mod import;
/// Iterators for walking the glTF node hierarchy.
pub mod iter;
/// Support for the `KHR_lights_punctual` extension.
#[cfg(feature = "KHR_lights_punctual")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_lights_punctual")))]
pub mod khr_lights_punctual;
/// Support for the `KHR_materials_variants` extension.
#[cfg(feature = "KHR_materials_variants")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_variants")))]
pub mod khr_materials_variants;
/// Material properties of primitives.
pub mod material;
/// For internal use.
mod math;
/// Meshes and their primitives.
pub mod mesh;
/// The glTF node hierarchy.
pub mod scene;
/// Mesh skinning primitives.
pub mod skin;
/// Textures and their samplers.
pub mod texture;
#[cfg(feature = "extensions")]
use json::Value;
#[cfg(feature = "extensions")]
use serde_json::Map;
#[doc(inline)]
pub use self::accessor::Accessor;
#[doc(inline)]
pub use self::animation::Animation;
#[doc(inline)]
pub use self::binary::Glb;
#[doc(inline)]
pub use self::buffer::Buffer;
#[doc(inline)]
pub use self::camera::Camera;
#[doc(inline)]
pub use self::image::Image;
#[cfg(feature = "import")]
#[doc(inline)]
pub use self::import::import;
#[cfg(feature = "import")]
#[doc(inline)]
pub use self::import::import_buffers;
#[cfg(feature = "import")]
#[doc(inline)]
pub use self::import::import_images;
#[cfg(feature = "import")]
#[doc(inline)]
pub use self::import::import_slice;
#[doc(inline)]
pub use self::material::Material;
#[doc(inline)]
pub use self::mesh::{Attribute, Mesh, Primitive, Semantic};
#[doc(inline)]
pub use self::scene::{Node, Scene};
#[doc(inline)]
pub use self::skin::Skin;
#[doc(inline)]
pub use self::texture::Texture;
use std::path::Path;
use std::{fs, io, ops, result};
pub(crate) trait Normalize<T> {
fn normalize(self) -> T;
}
/// Result type for convenience.
pub type Result<T> = result::Result<T, Error>;
/// Represents a runtime error.
#[derive(Debug)]
pub enum Error {
/// Base 64 decoding error.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
Base64(base64::DecodeError),
/// GLB parsing error.
Binary(binary::Error),
/// Buffer length does not match expected length.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
BufferLength {
/// The index of the offending buffer.
buffer: usize,
/// The expected buffer length in bytes.
expected: usize,
/// The number of bytes actually available.
actual: usize,
},
/// JSON deserialization error.
Deserialize(json::Error),
/// Standard I/O error.
Io(std::io::Error),
/// Image decoding error.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
Image(image_crate::ImageError),
/// The `BIN` chunk of binary glTF is referenced but does not exist.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
MissingBlob,
/// An external file is referenced in a slice-only import, where no base path is available.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
ExternalReferenceInSliceImport,
/// Unsupported image encoding.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
UnsupportedImageEncoding,
/// Unsupported image format.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
UnsupportedImageFormat(image_crate::DynamicImage),
/// Unsupported URI scheme.
#[cfg(feature = "import")]
#[cfg_attr(docsrs, doc(cfg(feature = "import")))]
UnsupportedScheme,
/// glTF validation error.
Validation(Vec<(json::Path, json::validation::Error)>),
}
/// glTF JSON wrapper plus binary payload.
#[derive(Clone, Debug)]
pub struct Gltf {
/// The glTF JSON wrapper.
pub document: Document,
/// The glTF binary payload in the case of binary glTF.
pub blob: Option<Vec<u8>>,
}
/// glTF JSON wrapper.
#[derive(Clone, Debug)]
pub struct Document(json::Root);
impl Gltf {
/// Convenience function that loads glTF from the file system.
pub fn open<P>(path: P) -> Result<Self>
where
P: AsRef<Path>,
{
let file = fs::File::open(path)?;
let reader = io::BufReader::new(file);
let gltf = Self::from_reader(reader)?;
Ok(gltf)
}
/// Loads glTF from a reader without performing validation checks.
pub fn from_reader_without_validation<R>(mut reader: R) -> Result<Self>
where
R: io::Read + io::Seek,
{
let mut magic = [0u8; 4];
reader.read_exact(&mut magic)?;
reader.seek(io::SeekFrom::Current(-4))?;
let (json, blob): (json::Root, Option<Vec<u8>>);
if magic.starts_with(b"glTF") {
let mut glb = binary::Glb::from_reader(reader)?;
// TODO: use `json::from_reader` instead of `json::from_slice`
json = json::deserialize::from_slice(&glb.json)?;
blob = glb.bin.take().map(|x| x.into_owned());
} else {
json = json::deserialize::from_reader(reader)?;
blob = None;
};
let document = Document::from_json_without_validation(json);
Ok(Gltf { document, blob })
}
/// Loads glTF from a reader.
pub fn from_reader<R>(reader: R) -> Result<Self>
where
R: io::Read + io::Seek,
{
let gltf = Self::from_reader_without_validation(reader)?;
gltf.document.validate()?;
Ok(gltf)
}
/// Loads glTF from a slice of bytes without performing validation
/// checks.
pub fn from_slice_without_validation(slice: &[u8]) -> Result<Self> {
let (json, blob): (json::Root, Option<Vec<u8>>);
if slice.starts_with(b"glTF") {
let mut glb = binary::Glb::from_slice(slice)?;
json = json::deserialize::from_slice(&glb.json)?;
blob = glb.bin.take().map(|x| x.into_owned());
} else {
json = json::deserialize::from_slice(slice)?;
blob = None;
};
let document = Document::from_json_without_validation(json);
Ok(Gltf { document, blob })
}
/// Loads glTF from a slice of bytes.
pub fn from_slice(slice: &[u8]) -> Result<Self> {
let gltf = Self::from_slice_without_validation(slice)?;
gltf.document.validate()?;
Ok(gltf)
}
}
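// --- Editor's illustrative sketch; not part of the vendored source. ---
// The constructors above sniff the four-byte `glTF` magic, so one entry point
// handles both JSON (.gltf) and binary (.glb) payloads. A minimal helper built
// only on items defined in this file (the function name is hypothetical):
#[allow(dead_code)]
fn load_from_memory(path: &Path) -> Result<Gltf> {
    // `?` converts the I/O error through the `From<std::io::Error>` impl below.
    let bytes = fs::read(path)?;
    // `from_slice` runs validation; `from_slice_without_validation` skips it
    // when the input is already trusted.
    Gltf::from_slice(&bytes)
}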
impl ops::Deref for Gltf {
type Target = Document;
fn deref(&self) -> &Self::Target {
&self.document
}
}
impl ops::DerefMut for Gltf {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.document
}
}
impl Document {
/// Loads glTF from pre-deserialized JSON.
pub fn from_json(json: json::Root) -> Result<Self> {
let document = Self::from_json_without_validation(json);
document.validate()?;
Ok(document)
}
/// Loads glTF from pre-deserialized JSON without performing
/// validation checks.
pub fn from_json_without_validation(json: json::Root) -> Self {
Document(json)
}
/// Unwraps the glTF document.
pub fn into_json(self) -> json::Root {
self.0
}
/// Unwraps the glTF document, without consuming it.
pub fn as_json(&self) -> &json::Root {
&self.0
}
/// Perform validation checks on loaded glTF.
pub(crate) fn validate(&self) -> Result<()> {
use json::validation::Validate;
let mut errors = Vec::new();
self.0
.validate(&self.0, json::Path::new, &mut |path, error| {
errors.push((path(), error))
});
if errors.is_empty() {
Ok(())
} else {
Err(Error::Validation(errors))
}
}
/// Returns an `Iterator` that visits the accessors of the glTF asset.
pub fn accessors(&self) -> iter::Accessors {
iter::Accessors {
iter: self.0.accessors.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the animations of the glTF asset.
pub fn animations(&self) -> iter::Animations {
iter::Animations {
iter: self.0.animations.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the pre-loaded buffers of the glTF asset.
pub fn buffers(&self) -> iter::Buffers {
iter::Buffers {
iter: self.0.buffers.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the cameras of the glTF asset.
pub fn cameras(&self) -> iter::Cameras {
iter::Cameras {
iter: self.0.cameras.iter().enumerate(),
document: self,
}
}
/// Returns the default scene, if provided.
pub fn default_scene(&self) -> Option<Scene> {
self.0
.scene
.as_ref()
.map(|index| self.scenes().nth(index.value()).unwrap())
}
/// Returns the extensions referenced in this glTF document.
pub fn extensions_used(&self) -> iter::ExtensionsUsed {
iter::ExtensionsUsed(self.0.extensions_used.iter())
}
/// Returns the extensions required to load and render this asset.
pub fn extensions_required(&self) -> iter::ExtensionsRequired {
iter::ExtensionsRequired(self.0.extensions_required.iter())
}
/// Returns an `Iterator` that visits the pre-loaded images of the glTF asset.
pub fn images(&self) -> iter::Images {
iter::Images {
iter: self.0.images.iter().enumerate(),
document: self,
}
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let root = self.0.extensions.as_ref()?;
Some(&root.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let root = self.0.extensions.as_ref()?;
root.others.get(ext_name)
}
/// Returns an `Iterator` that visits the lights of the glTF asset as defined by the
/// `KHR_lights_punctual` extension.
#[cfg(feature = "KHR_lights_punctual")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_lights_punctual")))]
pub fn lights(&self) -> Option<iter::Lights> {
let iter = self
.0
.extensions
.as_ref()?
.khr_lights_punctual
.as_ref()?
.lights
.iter()
.enumerate();
Some(iter::Lights {
iter,
document: self,
})
}
/// Returns an `Iterator` that visits the variants of the glTF asset as defined by the
/// `KHR_materials_variants` extension.
#[cfg(feature = "KHR_materials_variants")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_variants")))]
pub fn variants(&self) -> Option<iter::Variants> {
let iter = self
.0
.extensions
.as_ref()?
.khr_materials_variants
.as_ref()?
.variants
.iter()
.enumerate();
Some(iter::Variants {
iter,
document: self,
})
}
/// Returns an `Iterator` that visits the materials of the glTF asset.
pub fn materials(&self) -> iter::Materials {
iter::Materials {
iter: self.0.materials.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the meshes of the glTF asset.
pub fn meshes(&self) -> iter::Meshes {
iter::Meshes {
iter: self.0.meshes.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the nodes of the glTF asset.
pub fn nodes(&self) -> iter::Nodes {
iter::Nodes {
iter: self.0.nodes.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the samplers of the glTF asset.
pub fn samplers(&self) -> iter::Samplers {
iter::Samplers {
iter: self.0.samplers.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the scenes of the glTF asset.
pub fn scenes(&self) -> iter::Scenes {
iter::Scenes {
iter: self.0.scenes.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the skins of the glTF asset.
pub fn skins(&self) -> iter::Skins {
iter::Skins {
iter: self.0.skins.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the textures of the glTF asset.
pub fn textures(&self) -> iter::Textures {
iter::Textures {
iter: self.0.textures.iter().enumerate(),
document: self,
}
}
/// Returns an `Iterator` that visits the pre-loaded buffer views of the glTF
/// asset.
pub fn views(&self) -> iter::Views {
iter::Views {
iter: self.0.buffer_views.iter().enumerate(),
document: self,
}
}
}
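// --- Editor's illustrative sketch; not part of the vendored source. ---
// `extensions_required` names the extensions a loader must implement to render
// the asset correctly, so a conservative consumer can reject assets up front.
// The supported list below is purely illustrative.
#[allow(dead_code)]
fn reject_unsupported_extensions(document: &Document) -> std::result::Result<(), String> {
    const SUPPORTED: &[&str] = &["KHR_lights_punctual", "KHR_materials_variants"];
    for name in document.extensions_required() {
        if !SUPPORTED.contains(&name) {
            return Err(format!("unsupported required extension: {}", name));
        }
    }
    Ok(())
}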
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
#[cfg(feature = "import")]
Error::Base64(ref e) => e.fmt(f),
Error::Binary(ref e) => e.fmt(f),
#[cfg(feature = "import")]
Error::BufferLength {
buffer,
expected,
actual,
} => {
write!(
f,
"buffer {}: expected {} bytes but received {} bytes",
buffer, expected, actual
)
}
Error::Deserialize(ref e) => e.fmt(f),
Error::Io(ref e) => e.fmt(f),
#[cfg(feature = "import")]
Error::Image(ref e) => e.fmt(f),
#[cfg(feature = "import")]
Error::MissingBlob => write!(f, "missing binary portion of binary glTF"),
#[cfg(feature = "import")]
Error::ExternalReferenceInSliceImport => {
write!(f, "external reference in slice only import")
}
#[cfg(feature = "import")]
Error::UnsupportedImageEncoding => write!(f, "unsupported image encoding"),
#[cfg(feature = "import")]
Error::UnsupportedImageFormat(image) => {
write!(f, "unsupported image format: {:?}", image.color())
}
#[cfg(feature = "import")]
Error::UnsupportedScheme => write!(f, "unsupported URI scheme"),
Error::Validation(ref xs) => {
write!(f, "invalid glTF:")?;
for (ref path, ref error) in xs {
write!(f, " {}: {};", path, error)?;
}
Ok(())
}
}
}
}
impl std::error::Error for Error {}
impl From<binary::Error> for Error {
fn from(err: binary::Error) -> Self {
Error::Binary(err)
}
}
impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self {
Error::Io(err)
}
}
#[cfg(feature = "import")]
impl From<image_crate::ImageError> for Error {
fn from(err: image_crate::ImageError) -> Self {
Error::Image(err)
}
}
impl From<json::Error> for Error {
fn from(err: json::Error) -> Self {
Error::Deserialize(err)
}
}
impl From<Vec<(json::Path, json::validation::Error)>> for Error {
fn from(errs: Vec<(json::Path, json::validation::Error)>) -> Self {
Error::Validation(errs)
}
}
impl Normalize<i8> for i8 {
fn normalize(self) -> i8 {
self
}
}
impl Normalize<u8> for i8 {
fn normalize(self) -> u8 {
self.max(0) as u8 * 2
}
}
impl Normalize<i16> for i8 {
fn normalize(self) -> i16 {
self as i16 * 0x100
}
}
impl Normalize<u16> for i8 {
fn normalize(self) -> u16 {
self.max(0) as u16 * 0x200
}
}
impl Normalize<f32> for i8 {
fn normalize(self) -> f32 {
(self as f32 * 127.0_f32.recip()).max(-1.0)
}
}
impl Normalize<i8> for u8 {
fn normalize(self) -> i8 {
(self / 2) as i8
}
}
impl Normalize<u8> for u8 {
fn normalize(self) -> u8 {
self
}
}
impl Normalize<i16> for u8 {
fn normalize(self) -> i16 {
self as i16 * 0x80
}
}
impl Normalize<u16> for u8 {
fn normalize(self) -> u16 {
self as u16 * 0x100
}
}
impl Normalize<f32> for u8 {
fn normalize(self) -> f32 {
self as f32 * 255.0_f32.recip()
}
}
impl Normalize<i8> for i16 {
fn normalize(self) -> i8 {
(self / 0x100) as i8
}
}
impl Normalize<u8> for i16 {
fn normalize(self) -> u8 {
(self.max(0) / 0x80) as u8
}
}
impl Normalize<i16> for i16 {
fn normalize(self) -> i16 {
self
}
}
impl Normalize<u16> for i16 {
fn normalize(self) -> u16 {
self.max(0) as u16 * 2
}
}
impl Normalize<f32> for i16 {
fn normalize(self) -> f32 {
(self as f32 * 32767.0_f32.recip()).max(-1.0)
}
}
impl Normalize<i8> for u16 {
fn normalize(self) -> i8 {
(self / 0x200) as i8
}
}
impl Normalize<u8> for u16 {
fn normalize(self) -> u8 {
(self / 0x100) as u8
}
}
impl Normalize<i16> for u16 {
fn normalize(self) -> i16 {
(self / 2) as i16
}
}
impl Normalize<u16> for u16 {
fn normalize(self) -> u16 {
self
}
}
impl Normalize<f32> for u16 {
fn normalize(self) -> f32 {
self as f32 * 65535.0_f32.recip()
}
}
impl Normalize<i8> for f32 {
fn normalize(self) -> i8 {
(self * 127.0) as i8
}
}
impl Normalize<u8> for f32 {
fn normalize(self) -> u8 {
(self.max(0.0) * 255.0) as u8
}
}
impl Normalize<i16> for f32 {
fn normalize(self) -> i16 {
(self * 32767.0) as i16
}
}
impl Normalize<u16> for f32 {
fn normalize(self) -> u16 {
(self.max(0.0) * 65535.0) as u16
}
}
impl Normalize<f32> for f32 {
fn normalize(self) -> f32 {
self
}
}
impl<U, T> Normalize<[T; 2]> for [U; 2]
where
U: Normalize<T> + Copy,
{
fn normalize(self) -> [T; 2] {
[self[0].normalize(), self[1].normalize()]
}
}
impl<U, T> Normalize<[T; 3]> for [U; 3]
where
U: Normalize<T> + Copy,
{
fn normalize(self) -> [T; 3] {
[
self[0].normalize(),
self[1].normalize(),
self[2].normalize(),
]
}
}
impl<U, T> Normalize<[T; 4]> for [U; 4]
where
U: Normalize<T> + Copy,
{
fn normalize(self) -> [T; 4] {
[
self[0].normalize(),
self[1].normalize(),
self[2].normalize(),
self[3].normalize(),
]
}
}
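// --- Editor's illustrative sketch; not part of the vendored source. ---
// The `Normalize` impls above follow glTF's accessor normalization rules:
// unsigned integers map onto [0.0, 1.0] and signed integers onto [-1.0, 1.0],
// clamping at -1.0. A few spot checks, leaning on the crate's `approx` test
// dependency for the float comparison:
#[cfg(test)]
mod normalize_sketch {
    use super::Normalize;

    #[test]
    fn spot_checks() {
        // Widening integer conversions are exact: 0xFF becomes 0xFF00.
        let widened: u16 = 255u8.normalize();
        assert_eq!(widened, 0xFF00);
        // Unsigned-to-float reaches (approximately) 1.0 at the top of the range.
        let unit: f32 = 255u8.normalize();
        assert_relative_eq!(unit, 1.0, epsilon = 1e-6);
        // Signed-to-float clamps the most negative value to exactly -1.0.
        let clamped: f32 = (-128i8).normalize();
        assert_eq!(clamped, -1.0);
    }
}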

700
vendor/gltf/src/material.rs vendored Normal file

@@ -0,0 +1,700 @@
use crate::{texture, Document};
pub use json::material::AlphaMode;
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
lazy_static! {
static ref DEFAULT_MATERIAL: json::material::Material = Default::default();
}
/// The material appearance of a primitive.
#[derive(Clone, Debug)]
pub struct Material<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index - `None` when the default material.
index: Option<usize>,
/// The corresponding JSON struct.
json: &'a json::material::Material,
}
impl<'a> Material<'a> {
/// Constructs a `Material`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::material::Material,
) -> Self {
Self {
document,
index: Some(index),
json,
}
}
/// Constructs the default `Material`.
pub(crate) fn default(document: &'a Document) -> Self {
Self {
document,
index: None,
json: &DEFAULT_MATERIAL,
}
}
/// Returns the internal JSON index if this `Material` was explicitly defined.
///
/// This function returns `None` if the `Material` is the default material.
pub fn index(&self) -> Option<usize> {
self.index
}
/// The optional alpha cutoff value of the material.
pub fn alpha_cutoff(&self) -> Option<f32> {
self.json.alpha_cutoff.map(|value| value.0)
}
/// The alpha rendering mode of the material, specifying how the alpha value of
/// the main factor and texture is interpreted.
///
/// * In `Opaque` mode (default) the alpha value is ignored
/// and the rendered output is fully opaque.
/// * In `Mask` mode, the rendered
/// output is either fully opaque or fully transparent depending on the alpha
/// value and the specified alpha cutoff value.
/// * In `Blend` mode, the alpha value is used to composite the source and
/// destination areas and the rendered output is combined with the background
/// using the normal painting operation (i.e. the Porter and Duff over
/// operator).
pub fn alpha_mode(&self) -> AlphaMode {
self.json.alpha_mode.unwrap()
}
/// Specifies whether the material is double-sided.
///
/// * When this value is false, back-face culling is enabled.
/// * When this value is true, back-face culling is disabled and double sided
/// lighting is enabled. The back-face must have its normals reversed before
/// the lighting equation is evaluated.
pub fn double_sided(&self) -> bool {
self.json.double_sided
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Parameter values that define the metallic-roughness material model from
/// Physically-Based Rendering (PBR) methodology.
pub fn pbr_metallic_roughness(&self) -> PbrMetallicRoughness<'a> {
PbrMetallicRoughness::new(self.document, &self.json.pbr_metallic_roughness)
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Get the value of an extension based on the name of the extension
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, key: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(key)
}
/// Parameter values that define the specular-glossiness material model from
/// Physically-Based Rendering (PBR) methodology.
#[cfg(feature = "KHR_materials_pbrSpecularGlossiness")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_pbrSpecularGlossiness")))]
pub fn pbr_specular_glossiness(&self) -> Option<PbrSpecularGlossiness<'a>> {
self.json
.extensions
.as_ref()?
.pbr_specular_glossiness
.as_ref()
.map(|x| PbrSpecularGlossiness::new(self.document, x))
}
/// Parameter values that define the transmission of light through the material
#[cfg(feature = "KHR_materials_transmission")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_transmission")))]
pub fn transmission(&self) -> Option<Transmission<'a>> {
self.json
.extensions
.as_ref()?
.transmission
.as_ref()
.map(|x| Transmission::new(self.document, x))
}
/// Parameter values that define the index of refraction of the material
#[cfg(feature = "KHR_materials_ior")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_ior")))]
pub fn ior(&self) -> Option<f32> {
self.json.extensions.as_ref()?.ior.as_ref().map(|x| x.ior.0)
}
/// Parameter value that adjusts the strength of emissive material properties
#[cfg(feature = "KHR_materials_emissive_strength")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_emissive_strength")))]
pub fn emissive_strength(&self) -> Option<f32> {
self.json
.extensions
.as_ref()?
.emissive_strength
.as_ref()
.map(|x| x.emissive_strength.0)
}
/// Parameter values that define a volume for the transmission of light through the material
#[cfg(feature = "KHR_materials_volume")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_volume")))]
pub fn volume(&self) -> Option<Volume<'a>> {
self.json
.extensions
.as_ref()?
.volume
.as_ref()
.map(|x| Volume::new(self.document, x))
}
/// Parameter values that define the strength and colour of the specular reflection of the material
#[cfg(feature = "KHR_materials_specular")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_specular")))]
pub fn specular(&self) -> Option<Specular<'a>> {
self.json
.extensions
.as_ref()?
.specular
.as_ref()
.map(|x| Specular::new(self.document, x))
}
/// A tangent space normal map.
///
/// The texture contains RGB components in linear space. Each texel represents
/// the XYZ components of a normal vector in tangent space.
///
/// * Red [0 to 255] maps to X [-1 to 1].
/// * Green [0 to 255] maps to Y [-1 to 1].
/// * Blue [128 to 255] maps to Z [1/255 to 1].
///
/// The normal vectors use OpenGL conventions where +X is right, +Y is up, and
/// +Z points toward the viewer.
pub fn normal_texture(&self) -> Option<NormalTexture<'a>> {
self.json.normal_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
NormalTexture::new(texture, json)
})
}
/// The occlusion map texture.
///
/// The occlusion values are sampled from the R channel. Higher values indicate
/// areas that should receive full indirect lighting and lower values indicate
/// no indirect lighting. These values are linear.
///
/// If other channels are present (GBA), they are ignored for occlusion
/// calculations.
pub fn occlusion_texture(&self) -> Option<OcclusionTexture<'a>> {
self.json.occlusion_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
OcclusionTexture::new(texture, json)
})
}
/// The emissive map texture.
///
/// The emissive map controls the color and intensity of the light being
/// emitted by the material.
///
/// This texture contains RGB components in sRGB color space. If a fourth
/// component (A) is present, it is ignored.
pub fn emissive_texture(&self) -> Option<texture::Info<'a>> {
self.json.emissive_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// The emissive color of the material.
///
/// The default value is `[0.0, 0.0, 0.0]`.
pub fn emissive_factor(&self) -> [f32; 3] {
self.json.emissive_factor.0
}
/// Specifies whether the material is unlit.
///
/// Returns `true` if the [`KHR_materials_unlit`] property was specified, in which
/// case the renderer should prefer to ignore all PBR values except `baseColor`.
///
/// [`KHR_materials_unlit`]: https://github.com/KhronosGroup/glTF/tree/master/extensions/2.0/Khronos/KHR_materials_unlit#overview
#[cfg(feature = "KHR_materials_unlit")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_unlit")))]
pub fn unlit(&self) -> bool {
self.json
.extensions
.as_ref()
.map_or(false, |extensions| extensions.unlit.is_some())
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
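// --- Editor's illustrative sketch; not part of the vendored source. ---
// Pulling the most commonly used appearance parameters out of a `Material`
// through the accessors above (the function name is hypothetical):
#[allow(dead_code)]
fn summarize(material: &Material) {
    let pbr = material.pbr_metallic_roughness();
    println!(
        "material {:?}: base color {:?}, metallic {}, roughness {}, double-sided {}",
        material.index(),
        pbr.base_color_factor(),
        pbr.metallic_factor(),
        pbr.roughness_factor(),
        material.double_sided(),
    );
}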
/// A set of parameter values that are used to define the metallic-roughness
/// material model from Physically-Based Rendering (PBR) methodology.
pub struct PbrMetallicRoughness<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::material::PbrMetallicRoughness,
}
impl<'a> PbrMetallicRoughness<'a> {
/// Constructs `PbrMetallicRoughness`.
pub(crate) fn new(
document: &'a Document,
json: &'a json::material::PbrMetallicRoughness,
) -> Self {
Self { document, json }
}
/// Returns the material's base color factor.
///
/// The default value is `[1.0, 1.0, 1.0, 1.0]`.
pub fn base_color_factor(&self) -> [f32; 4] {
self.json.base_color_factor.0
}
/// Returns the base color texture. The texture contains RGB(A) components
/// in sRGB color space.
pub fn base_color_texture(&self) -> Option<texture::Info<'a>> {
self.json.base_color_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Returns the metalness factor of the material.
///
/// The default value is `1.0`.
pub fn metallic_factor(&self) -> f32 {
self.json.metallic_factor.0
}
/// Returns the roughness factor of the material.
///
/// * A value of 1.0 means the material is completely rough.
/// * A value of 0.0 means the material is completely smooth.
///
/// The default value is `1.0`.
pub fn roughness_factor(&self) -> f32 {
self.json.roughness_factor.0
}
/// The metallic-roughness texture.
///
/// The metalness values are sampled from the B channel.
/// The roughness values are sampled from the G channel.
/// These values are linear. If other channels are present (R or A),
/// they are ignored for metallic-roughness calculations.
pub fn metallic_roughness_texture(&self) -> Option<texture::Info<'a>> {
self.json.metallic_roughness_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Get the value of an extension based on the name of the extension
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, key: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(key)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// A set of parameter values that are used to define the transmission
/// factor of the material.
#[cfg(feature = "KHR_materials_transmission")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_transmission")))]
pub struct Transmission<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::extensions::material::Transmission,
}
#[cfg(feature = "KHR_materials_transmission")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_transmission")))]
impl<'a> Transmission<'a> {
/// Constructs `Transmission`.
pub(crate) fn new(
document: &'a Document,
json: &'a json::extensions::material::Transmission,
) -> Self {
Self { document, json }
}
/// Returns the material's transmission factor.
///
/// The default value is `0.0`.
pub fn transmission_factor(&self) -> f32 {
self.json.transmission_factor.0
}
/// Returns the transmission texture.
pub fn transmission_texture(&self) -> Option<texture::Info<'a>> {
self.json.transmission_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// Parameter values that define a volume for the transmission of light through the material
#[cfg(feature = "KHR_materials_volume")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_volume")))]
pub struct Volume<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::extensions::material::Volume,
}
#[cfg(feature = "KHR_materials_volume")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_volume")))]
impl<'a> Volume<'a> {
/// Constructs `Volume`.
pub(crate) fn new(
document: &'a Document,
json: &'a json::extensions::material::Volume,
) -> Self {
Self { document, json }
}
/// The thickness of the volume beneath the surface. The value is
/// given in the coordinate space of the mesh. If the value is 0
/// the material is thin-walled. Otherwise the material is a
/// volume boundary. The `doubleSided` property has no effect on
/// volume boundaries. Range is [0, +inf).
pub fn thickness_factor(&self) -> f32 {
self.json.thickness_factor.0
}
/// A texture that defines the thickness, stored in the G channel.
/// This will be multiplied by `thickness_factor`. Range is [0, 1].
pub fn thickness_texture(&self) -> Option<texture::Info<'a>> {
self.json.thickness_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Density of the medium given as the average distance that light
/// travels in the medium before interacting with a particle. The
/// value is given in world space. Range is (0, +inf).
pub fn attenuation_distance(&self) -> f32 {
self.json.attenuation_distance.0
}
/// The color that white light turns into due to absorption when
/// reaching the attenuation distance.
pub fn attenuation_color(&self) -> [f32; 3] {
self.json.attenuation_color.0
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// Parameter values that define the strength and colour of the specular reflection of the material
#[cfg(feature = "KHR_materials_specular")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_specular")))]
pub struct Specular<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::extensions::material::Specular,
}
#[cfg(feature = "KHR_materials_specular")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_specular")))]
impl<'a> Specular<'a> {
/// Constructs `Specular`.
pub(crate) fn new(
document: &'a Document,
json: &'a json::extensions::material::Specular,
) -> Self {
Self { document, json }
}
/// The strength of the specular reflection.
pub fn specular_factor(&self) -> f32 {
self.json.specular_factor.0
}
/// A texture that defines the strength of the specular reflection,
/// stored in the alpha (`A`) channel. This will be multiplied by
/// `specular_factor`.
pub fn specular_texture(&self) -> Option<texture::Info<'a>> {
self.json.specular_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// The F0 color of the specular reflection (linear RGB).
pub fn specular_color_factor(&self) -> [f32; 3] {
self.json.specular_color_factor.0
}
/// A texture that defines the F0 color of the specular reflection,
/// stored in the `RGB` channels and encoded in sRGB. This texture
/// will be multiplied by `specular_color_factor`.
pub fn specular_color_texture(&self) -> Option<texture::Info<'a>> {
self.json.specular_color_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// A set of parameter values that are used to define the specular-glossiness
/// material model from Physically-Based Rendering (PBR) methodology.
#[cfg(feature = "KHR_materials_pbrSpecularGlossiness")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_pbrSpecularGlossiness")))]
pub struct PbrSpecularGlossiness<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON struct.
json: &'a json::extensions::material::PbrSpecularGlossiness,
}
#[cfg(feature = "KHR_materials_pbrSpecularGlossiness")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_pbrSpecularGlossiness")))]
impl<'a> PbrSpecularGlossiness<'a> {
/// Constructs `PbrSpecularGlossiness`.
pub(crate) fn new(
document: &'a Document,
json: &'a json::extensions::material::PbrSpecularGlossiness,
) -> Self {
Self { document, json }
}
/// Returns the material's base color factor.
///
/// The default value is `[1.0, 1.0, 1.0, 1.0]`.
pub fn diffuse_factor(&self) -> [f32; 4] {
self.json.diffuse_factor.0
}
/// Returns the base color texture.
pub fn diffuse_texture(&self) -> Option<texture::Info<'a>> {
self.json.diffuse_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Returns the specular factor of the material.
///
/// The default value is `[1.0, 1.0, 1.0]`.
pub fn specular_factor(&self) -> [f32; 3] {
self.json.specular_factor.0
}
/// Returns the glossiness factor of the material.
///
/// A value of 1.0 means the material has full glossiness or is perfectly
/// smooth. A value of 0.0 means the material has no glossiness or is
/// completely rough. This value is linear.
///
/// The default value is `1.0`.
pub fn glossiness_factor(&self) -> f32 {
self.json.glossiness_factor.0
}
/// The specular-glossiness texture.
///
/// An RGBA texture, containing the specular color of the material (RGB
/// components) and its glossiness (A component). The color values are in
/// sRGB space.
pub fn specular_glossiness_texture(&self) -> Option<texture::Info<'a>> {
self.json.specular_glossiness_texture.as_ref().map(|json| {
let texture = self.document.textures().nth(json.index.value()).unwrap();
texture::Info::new(texture, json)
})
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// Defines the normal texture of a material.
pub struct NormalTexture<'a> {
/// The parent `Texture` struct.
texture: texture::Texture<'a>,
/// The corresponding JSON struct.
json: &'a json::material::NormalTexture,
}
impl<'a> NormalTexture<'a> {
/// Constructs a `NormalTexture`.
pub(crate) fn new(
texture: texture::Texture<'a>,
json: &'a json::material::NormalTexture,
) -> Self {
Self { texture, json }
}
/// Returns the scalar multiplier applied to each normal vector of the texture.
pub fn scale(&self) -> f32 {
self.json.scale
}
/// The set index of the texture's `TEXCOORD` attribute.
pub fn tex_coord(&self) -> u32 {
self.json.tex_coord
}
/// Returns the referenced texture.
pub fn texture(&self) -> texture::Texture<'a> {
self.texture.clone()
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Get the value of an extension based on the name of the extension
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, key: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(key)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
/// Defines the occlusion texture of a material.
pub struct OcclusionTexture<'a> {
/// The parent `Texture` struct.
texture: texture::Texture<'a>,
/// The corresponding JSON struct.
json: &'a json::material::OcclusionTexture,
}
impl<'a> OcclusionTexture<'a> {
/// Constructs an `OcclusionTexture`.
pub(crate) fn new(
texture: texture::Texture<'a>,
json: &'a json::material::OcclusionTexture,
) -> Self {
Self { texture, json }
}
/// Returns the scalar multiplier controlling the amount of occlusion applied.
pub fn strength(&self) -> f32 {
self.json.strength.0
}
/// Returns the set index of the texture's `TEXCOORD` attribute.
pub fn tex_coord(&self) -> u32 {
self.json.tex_coord
}
/// Returns the referenced texture.
pub fn texture(&self) -> texture::Texture<'a> {
self.texture.clone()
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Get the value of an extension based on the name of the extension
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, key: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(key)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
}
impl<'a> AsRef<texture::Texture<'a>> for NormalTexture<'a> {
fn as_ref(&self) -> &texture::Texture<'a> {
&self.texture
}
}
impl<'a> AsRef<texture::Texture<'a>> for OcclusionTexture<'a> {
fn as_ref(&self) -> &texture::Texture<'a> {
&self.texture
}
}
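// --- Editor's illustrative sketch; not part of the vendored source. ---
// `NormalTexture` and `OcclusionTexture` expose both the referenced texture and
// the per-map parameters (scale / strength and the TEXCOORD set). The helper
// name is hypothetical, and `Texture::index` is assumed from the texture module
// (not shown in this excerpt).
#[allow(dead_code)]
fn print_surface_maps(material: &Material) {
    if let Some(normal) = material.normal_texture() {
        println!(
            "normal map: texture #{}, scale {}, TEXCOORD_{}",
            normal.texture().index(),
            normal.scale(),
            normal.tex_coord(),
        );
    }
    if let Some(occlusion) = material.occlusion_texture() {
        println!(
            "occlusion map: texture #{}, strength {}, TEXCOORD_{}",
            occlusion.texture().index(),
            occlusion.strength(),
            occlusion.tex_coord(),
        );
    }
}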

397
vendor/gltf/src/math.rs vendored Normal file

@@ -0,0 +1,397 @@
// Copyright 2013-2014 The CGMath Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Modified for the gltf crate by the gltf library developers.
use std::ops;
#[cfg(test)]
mod test {
use super::*;
impl approx::AbsDiffEq for Vector4 {
type Epsilon = f32;
fn default_epsilon() -> f32 {
f32::default_epsilon()
}
fn abs_diff_eq(&self, other: &Vector4, epsilon: Self::Epsilon) -> bool {
f32::abs_diff_eq(&self.x, &other.x, epsilon)
&& f32::abs_diff_eq(&self.y, &other.y, epsilon)
&& f32::abs_diff_eq(&self.z, &other.z, epsilon)
&& f32::abs_diff_eq(&self.w, &other.w, epsilon)
}
}
impl approx::RelativeEq for Vector4 {
fn default_max_relative() -> f32 {
f32::default_max_relative()
}
fn relative_eq(&self, other: &Self, epsilon: f32, max_relative: f32) -> bool {
f32::relative_eq(&self.x, &other.x, epsilon, max_relative)
&& f32::relative_eq(&self.y, &other.y, epsilon, max_relative)
&& f32::relative_eq(&self.z, &other.z, epsilon, max_relative)
&& f32::relative_eq(&self.w, &other.w, epsilon, max_relative)
}
}
impl approx::UlpsEq for Vector4 {
fn default_max_ulps() -> u32 {
f32::default_max_ulps()
}
fn ulps_eq(&self, other: &Self, epsilon: f32, max_ulps: u32) -> bool {
f32::ulps_eq(&self.x, &other.x, epsilon, max_ulps)
&& f32::ulps_eq(&self.y, &other.y, epsilon, max_ulps)
&& f32::ulps_eq(&self.z, &other.z, epsilon, max_ulps)
&& f32::ulps_eq(&self.w, &other.w, epsilon, max_ulps)
}
}
impl approx::AbsDiffEq for Matrix4 {
type Epsilon = f32;
fn default_epsilon() -> f32 {
f32::default_epsilon()
}
fn abs_diff_eq(&self, other: &Matrix4, epsilon: Self::Epsilon) -> bool {
Vector4::abs_diff_eq(&self.x, &other.x, epsilon)
&& Vector4::abs_diff_eq(&self.y, &other.y, epsilon)
&& Vector4::abs_diff_eq(&self.z, &other.z, epsilon)
&& Vector4::abs_diff_eq(&self.w, &other.w, epsilon)
}
}
impl approx::RelativeEq for Matrix4 {
fn default_max_relative() -> f32 {
f32::default_max_relative()
}
fn relative_eq(&self, other: &Self, epsilon: f32, max_relative: f32) -> bool {
Vector4::relative_eq(&self.x, &other.x, epsilon, max_relative)
&& Vector4::relative_eq(&self.y, &other.y, epsilon, max_relative)
&& Vector4::relative_eq(&self.z, &other.z, epsilon, max_relative)
&& Vector4::relative_eq(&self.w, &other.w, epsilon, max_relative)
}
}
impl approx::UlpsEq for Matrix4 {
fn default_max_ulps() -> u32 {
f32::default_max_ulps()
}
fn ulps_eq(&self, other: &Self, epsilon: f32, max_ulps: u32) -> bool {
Vector4::ulps_eq(&self.x, &other.x, epsilon, max_ulps)
&& Vector4::ulps_eq(&self.y, &other.y, epsilon, max_ulps)
&& Vector4::ulps_eq(&self.z, &other.z, epsilon, max_ulps)
&& Vector4::ulps_eq(&self.w, &other.w, epsilon, max_ulps)
}
}
}
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct Vector3 {
pub x: f32,
pub y: f32,
pub z: f32,
}
impl Vector3 {
pub fn new(x: f32, y: f32, z: f32) -> Self {
Vector3 { x, y, z }
}
pub fn magnitude(&self) -> f32 {
(self.x * self.x + self.y * self.y + self.z * self.z).sqrt()
}
pub fn multiply(&mut self, s: f32) {
self.x *= s;
self.y *= s;
self.z *= s;
}
#[cfg(test)]
pub fn normalize(self) -> Vector3 {
self * (1.0 / self.magnitude())
}
}
impl ops::Mul<f32> for Vector3 {
type Output = Vector3;
fn mul(mut self, rhs: f32) -> Self::Output {
self.multiply(rhs);
self
}
}
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct Vector4 {
pub x: f32,
pub y: f32,
pub z: f32,
pub w: f32,
}
impl Vector4 {
pub fn new(x: f32, y: f32, z: f32, w: f32) -> Self {
Vector4 { x, y, z, w }
}
pub fn multiply(&mut self, s: f32) {
self.x *= s;
self.y *= s;
self.z *= s;
self.w *= s;
}
pub fn as_array(&self) -> [f32; 4] {
[self.x, self.y, self.z, self.w]
}
#[cfg(test)]
pub fn from_array([x, y, z, w]: [f32; 4]) -> Self {
Self { x, y, z, w }
}
}
impl ops::Add for Vector4 {
type Output = Self;
fn add(self, other: Self) -> Self {
Self {
x: self.x + other.x,
y: self.y + other.y,
z: self.z + other.z,
w: self.w + other.w,
}
}
}
impl ops::Mul<f32> for Vector4 {
type Output = Vector4;
fn mul(mut self, rhs: f32) -> Self::Output {
self.multiply(rhs);
self
}
}
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct Matrix3 {
pub x: Vector3,
pub y: Vector3,
pub z: Vector3,
}
impl Matrix3 {
#[rustfmt::skip]
#[allow(clippy::too_many_arguments)]
pub fn new(
c0r0: f32, c0r1: f32, c0r2: f32,
c1r0: f32, c1r1: f32, c1r2: f32,
c2r0: f32, c2r1: f32, c2r2: f32,
) -> Matrix3 {
Matrix3 {
x: Vector3::new(c0r0, c0r1, c0r2),
y: Vector3::new(c1r0, c1r1, c1r2),
z: Vector3::new(c2r0, c2r1, c2r2),
}
}
pub fn determinant(&self) -> f32 {
self.x.x * (self.y.y * self.z.z - self.z.y * self.y.z)
- self.y.x * (self.x.y * self.z.z - self.z.y * self.x.z)
+ self.z.x * (self.x.y * self.y.z - self.y.y * self.x.z)
}
pub fn trace(&self) -> f32 {
self.x.x + self.y.y + self.z.z
}
}
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct Matrix4 {
pub x: Vector4,
pub y: Vector4,
pub z: Vector4,
pub w: Vector4,
}
impl Matrix4 {
#[rustfmt::skip]
#[allow(clippy::too_many_arguments)]
pub fn new(
c0r0: f32, c0r1: f32, c0r2: f32, c0r3: f32,
c1r0: f32, c1r1: f32, c1r2: f32, c1r3: f32,
c2r0: f32, c2r1: f32, c2r2: f32, c2r3: f32,
c3r0: f32, c3r1: f32, c3r2: f32, c3r3: f32,
) -> Matrix4 {
Matrix4 {
x: Vector4::new(c0r0, c0r1, c0r2, c0r3),
y: Vector4::new(c1r0, c1r1, c1r2, c1r3),
z: Vector4::new(c2r0, c2r1, c2r2, c2r3),
w: Vector4::new(c3r0, c3r1, c3r2, c3r3),
}
}
#[cfg(test)]
pub fn from_array([x, y, z, w]: [[f32; 4]; 4]) -> Matrix4 {
Matrix4 {
x: Vector4::from_array(x),
y: Vector4::from_array(y),
z: Vector4::from_array(z),
w: Vector4::from_array(w),
}
}
/// Create a homogeneous transformation matrix from a translation vector.
#[rustfmt::skip]
pub fn from_translation(v: Vector3) -> Matrix4 {
Matrix4::new(
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
v.x, v.y, v.z, 1.0,
)
}
/// Create a homogeneous transformation matrix from a set of scale values.
#[rustfmt::skip]
pub fn from_nonuniform_scale(x: f32, y: f32, z: f32) -> Matrix4 {
Matrix4::new(
x, 0.0, 0.0, 0.0,
0.0, y, 0.0, 0.0,
0.0, 0.0, z, 0.0,
0.0, 0.0, 0.0, 1.0,
)
}
/// Convert the quaternion to a 4 x 4 rotation matrix.
pub fn from_quaternion(q: Quaternion) -> Matrix4 {
let x2 = q.v.x + q.v.x;
let y2 = q.v.y + q.v.y;
let z2 = q.v.z + q.v.z;
let xx2 = x2 * q.v.x;
let xy2 = x2 * q.v.y;
let xz2 = x2 * q.v.z;
let yy2 = y2 * q.v.y;
let yz2 = y2 * q.v.z;
let zz2 = z2 * q.v.z;
let sy2 = y2 * q.s;
let sz2 = z2 * q.s;
let sx2 = x2 * q.s;
Matrix4 {
x: Vector4::new(1.0 - yy2 - zz2, xy2 + sz2, xz2 - sy2, 0.0),
y: Vector4::new(xy2 - sz2, 1.0 - xx2 - zz2, yz2 + sx2, 0.0),
z: Vector4::new(xz2 + sy2, yz2 - sx2, 1.0 - xx2 - yy2, 0.0),
w: Vector4::new(0.0, 0.0, 0.0, 1.0),
}
}
pub fn as_array(&self) -> [[f32; 4]; 4] {
[
self.x.as_array(),
self.y.as_array(),
self.z.as_array(),
self.w.as_array(),
]
}
}
impl ops::Mul<Matrix4> for Matrix4 {
type Output = Matrix4;
fn mul(self, rhs: Matrix4) -> Self::Output {
let a = self.x;
let b = self.y;
let c = self.z;
let d = self.w;
Matrix4 {
x: a * rhs.x.x + b * rhs.x.y + c * rhs.x.z + d * rhs.x.w,
y: a * rhs.y.x + b * rhs.y.y + c * rhs.y.z + d * rhs.y.w,
z: a * rhs.z.x + b * rhs.z.y + c * rhs.z.z + d * rhs.z.w,
w: a * rhs.w.x + b * rhs.w.y + c * rhs.w.z + d * rhs.w.w,
}
}
}
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct Quaternion {
pub s: f32,
pub v: Vector3,
}
impl Quaternion {
pub fn new(w: f32, xi: f32, yj: f32, zk: f32) -> Quaternion {
Quaternion {
s: w,
v: Vector3::new(xi, yj, zk),
}
}
#[cfg(test)]
pub fn from_axis_angle(axis: Vector3, radians: f32) -> Quaternion {
Quaternion {
s: (0.5 * radians).cos(),
v: axis * (0.5 * radians).sin(),
}
}
/// Convert a rotation matrix to an equivalent quaternion.
pub fn from_matrix(m: Matrix3) -> Quaternion {
let trace = m.trace();
if trace >= 0.0 {
let s = (1.0 + trace).sqrt();
let w = 0.5 * s;
let s = 0.5 / s;
let x = (m.y.z - m.z.y) * s;
let y = (m.z.x - m.x.z) * s;
let z = (m.x.y - m.y.x) * s;
Quaternion::new(w, x, y, z)
} else if (m.x.x > m.y.y) && (m.x.x > m.z.z) {
let s = ((m.x.x - m.y.y - m.z.z) + 1.0).sqrt();
let x = 0.5 * s;
let s = 0.5 / s;
let y = (m.y.x + m.x.y) * s;
let z = (m.x.z + m.z.x) * s;
let w = (m.y.z - m.z.y) * s;
Quaternion::new(w, x, y, z)
} else if m.y.y > m.z.z {
let s = ((m.y.y - m.x.x - m.z.z) + 1.0).sqrt();
let y = 0.5 * s;
let s = 0.5 / s;
let z = (m.z.y + m.y.z) * s;
let x = (m.y.x + m.x.y) * s;
let w = (m.z.x - m.x.z) * s;
Quaternion::new(w, x, y, z)
} else {
let s = ((m.z.z - m.x.x - m.y.y) + 1.0).sqrt();
let z = 0.5 * s;
let s = 0.5 / s;
let x = (m.x.z + m.z.x) * s;
let y = (m.z.y + m.y.z) * s;
let w = (m.x.y - m.y.x) * s;
Quaternion::new(w, x, y, z)
}
}
}
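// These helpers back `scene::Transform::matrix()`; the module itself is
// crate-private, so a dependent crate reaches the same column-major
// translation * rotation * scale composition through the public `Transform`
// type. A minimal sketch, assuming an identity rotation:
fn example_trs_matrix() -> [[f32; 4]; 4] {
    gltf::scene::Transform::Decomposed {
        translation: [1.0, 2.0, 3.0],
        rotation: [0.0, 0.0, 0.0, 1.0], // [x, y, z, w], with w the scalar part
        scale: [2.0, 2.0, 2.0],
    }
    .matrix()
}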

177
vendor/gltf/src/mesh/iter.rs vendored Normal file
View File

@@ -0,0 +1,177 @@
use std::{collections, iter, slice};
use super::{Attribute, Mesh, MorphTarget, Primitive};
use crate::Document;
/// An `Iterator` that visits the morph targets of a `Primitive`.
#[derive(Clone, Debug)]
pub struct MorphTargets<'a> {
/// The parent `Document` struct.
pub(crate) document: &'a Document,
/// The internal JSON iterator.
pub(crate) iter: slice::Iter<'a, json::mesh::MorphTarget>,
}
/// An `Iterator` that visits the attributes of a `Primitive`.
#[derive(Clone, Debug)]
pub struct Attributes<'a> {
/// The parent `Document` struct.
pub(crate) document: &'a Document,
/// The parent `Primitive` struct.
#[allow(dead_code)]
pub(crate) prim: Primitive<'a>,
/// The internal attribute iterator.
pub(crate) iter: collections::btree_map::Iter<
'a,
json::validation::Checked<json::mesh::Semantic>,
json::Index<json::accessor::Accessor>,
>,
}
/// An `Iterator` that visits the primitives of a `Mesh`.
#[derive(Clone, Debug)]
pub struct Primitives<'a> {
/// The parent `Mesh` struct.
pub(crate) mesh: Mesh<'a>,
/// The internal JSON primitive iterator.
pub(crate) iter: iter::Enumerate<slice::Iter<'a, json::mesh::Primitive>>,
}
impl<'a> ExactSizeIterator for Attributes<'a> {}
impl<'a> Iterator for Attributes<'a> {
type Item = Attribute<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map(|(key, index)| {
let semantic = key.as_ref().unwrap().clone();
let accessor = self.document.accessors().nth(index.value()).unwrap();
(semantic, accessor)
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> ExactSizeIterator for Primitives<'a> {}
impl<'a> Iterator for Primitives<'a> {
type Item = Primitive<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(index, json)| Primitive::new(self.mesh.clone(), index, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let mesh = self.mesh;
self.iter
.last()
.map(|(index, json)| Primitive::new(mesh, index, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|(index, json)| Primitive::new(self.mesh.clone(), index, json))
}
}
fn map_morph_target<'a>(
document: &'a crate::Document,
json: &json::mesh::MorphTarget,
) -> MorphTarget<'a> {
let positions = json
.positions
.as_ref()
.map(|index| document.accessors().nth(index.value()).unwrap());
let normals = json
.normals
.as_ref()
.map(|index| document.accessors().nth(index.value()).unwrap());
let tangents = json
.tangents
.as_ref()
.map(|index| document.accessors().nth(index.value()).unwrap());
MorphTarget {
positions,
normals,
tangents,
}
}
impl<'a> ExactSizeIterator for MorphTargets<'a> {}
impl<'a> Iterator for MorphTargets<'a> {
type Item = MorphTarget<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|json| map_morph_target(self.document, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|json| map_morph_target(document, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|json| map_morph_target(self.document, json))
}
}
/// An `Iterator` that visits the variant mappings of a `Mesh`.
#[cfg(feature = "KHR_materials_variants")]
#[derive(Clone, Debug)]
pub struct Mappings<'a> {
/// Internal mapping iterator.
pub(crate) iter: slice::Iter<'a, json::extensions::mesh::Mapping>,
/// The internal root glTF object.
pub(crate) document: &'a Document,
}
#[cfg(feature = "KHR_materials_variants")]
impl<'a> ExactSizeIterator for Mappings<'a> {}
#[cfg(feature = "KHR_materials_variants")]
impl<'a> Iterator for Mappings<'a> {
type Item = crate::khr_materials_variants::Mapping<'a>;
fn next(&mut self) -> Option<Self::Item> {
let document = self.document;
self.iter
.next()
.map(|json| crate::khr_materials_variants::Mapping::new(document, json))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|json| crate::khr_materials_variants::Mapping::new(document, json))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
let document = self.document;
self.iter
.nth(n)
.map(|json| crate::khr_materials_variants::Mapping::new(document, json))
}
}

476
vendor/gltf/src/mesh/mod.rs vendored Normal file
View File

@@ -0,0 +1,476 @@
//! # Basic usage
//!
//! Listing the attributes of each mesh primitive in a glTF asset.
//!
//! ```
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! # let gltf = gltf::Gltf::open("examples/Box.gltf")?;
//! for mesh in gltf.meshes() {
//! println!("Mesh #{}", mesh.index());
//! for primitive in mesh.primitives() {
//! println!("- Primitive #{}", primitive.index());
//! for (semantic, _) in primitive.attributes() {
//! println!("-- {:?}", semantic);
//! }
//! }
//! }
//! # Ok(())
//! # }
//! # fn main() {
//! # let _ = run().expect("runtime error");
//! # }
//! ```
//!
//! # Reader utility
//!
//! Printing the vertex positions of each primitive of each mesh in
//! a glTF asset.
//!
//! ```
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! let (gltf, buffers, _) = gltf::import("examples/Box.gltf")?;
//! for mesh in gltf.meshes() {
//! println!("Mesh #{}", mesh.index());
//! for primitive in mesh.primitives() {
//! println!("- Primitive #{}", primitive.index());
//! let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
//! if let Some(iter) = reader.read_positions() {
//! for vertex_position in iter {
//! println!("{:?}", vertex_position);
//! }
//! }
//! }
//! }
//! # Ok(())
//! # }
//! # fn main() {
//! # let _ = run().expect("runtime error");
//! # }
//! ```
/// Iterators.
pub mod iter;
/// Utility functions.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub mod util;
use crate::{Accessor, Buffer, Document, Material};
#[cfg(feature = "utils")]
use crate::accessor;
pub use json::mesh::{Mode, Semantic};
use json::validation::Checked;
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
/// Vertex attribute data.
pub type Attribute<'a> = (Semantic, Accessor<'a>);
/// Vertex position bounding box.
pub type BoundingBox = Bounds<[f32; 3]>;
/// The minimum and maximum values for a generic accessor.
#[derive(Clone, Debug, PartialEq)]
pub struct Bounds<T> {
/// Minimum value.
pub min: T,
/// Maximum value.
pub max: T,
}
/// A set of primitives to be rendered.
#[derive(Clone, Debug)]
pub struct Mesh<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::mesh::Mesh,
}
/// A single morph target for a mesh primitive.
#[derive(Clone, Debug)]
pub struct MorphTarget<'a> {
/// XYZ vertex position displacements.
positions: Option<Accessor<'a>>,
/// XYZ vertex normal displacements.
normals: Option<Accessor<'a>>,
/// XYZ vertex tangent displacements.
tangents: Option<Accessor<'a>>,
}
/// Geometry to be rendered with the given material.
#[derive(Clone, Debug)]
pub struct Primitive<'a> {
/// The parent `Mesh` struct.
mesh: Mesh<'a>,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::mesh::Primitive,
}
/// Mesh primitive reader.
#[derive(Clone, Debug)]
pub struct Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
#[allow(dead_code)]
pub(crate) primitive: &'a Primitive<'a>,
#[allow(dead_code)]
pub(crate) get_buffer_data: F,
}
impl<'a> Mesh<'a> {
/// Constructs a `Mesh`.
pub(crate) fn new(document: &'a Document, index: usize, json: &'a json::mesh::Mesh) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Defines the geometry to be rendered with a material.
pub fn primitives(&self) -> iter::Primitives<'a> {
iter::Primitives {
mesh: self.clone(),
iter: self.json.primitives.iter().enumerate(),
}
}
/// Defines the weights to be applied to the morph targets.
pub fn weights(&self) -> Option<&'a [f32]> {
self.json.weights.as_deref()
}
}
impl<'a> Primitive<'a> {
/// Constructs a `Primitive`.
pub(crate) fn new(mesh: Mesh<'a>, index: usize, json: &'a json::mesh::Primitive) -> Self {
Self { mesh, index, json }
}
/// Returns the bounds of the `POSITION` vertex attribute.
pub fn bounding_box(&self) -> BoundingBox {
// NOTE: cannot panic if validated "minimally"
let pos_accessor_index = self
.json
.attributes
.get(&Checked::Valid(Semantic::Positions))
.unwrap();
let pos_accessor = self
.mesh
.document
.accessors()
.nth(pos_accessor_index.value())
.unwrap();
let min: [f32; 3] = json::deserialize::from_value(pos_accessor.min().unwrap()).unwrap();
let max: [f32; 3] = json::deserialize::from_value(pos_accessor.max().unwrap()).unwrap();
Bounds { min, max }
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Return the accessor with the given semantic.
pub fn get(&self, semantic: &Semantic) -> Option<Accessor<'a>> {
self.json
.attributes
.get(&json::validation::Checked::Valid(semantic.clone()))
.map(|index| self.mesh.document.accessors().nth(index.value()).unwrap())
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns the accessor containing the primitive indices, if provided.
pub fn indices(&self) -> Option<Accessor<'a>> {
self.json
.indices
.as_ref()
.map(|index| self.mesh.document.accessors().nth(index.value()).unwrap())
}
/// Returns an `Iterator` that visits the vertex attributes.
pub fn attributes(&self) -> iter::Attributes<'a> {
iter::Attributes {
document: self.mesh.document,
prim: self.clone(),
iter: self.json.attributes.iter(),
}
}
/// Returns the material to apply to this primitive when rendering.
pub fn material(&self) -> Material<'a> {
self.json
.material
.as_ref()
.map(|index| self.mesh.document.materials().nth(index.value()).unwrap())
.unwrap_or_else(|| Material::default(self.mesh.document))
}
/// The type of primitives to render.
pub fn mode(&self) -> Mode {
self.json.mode.unwrap()
}
/// Returns an `Iterator` that visits the morph targets of the primitive.
pub fn morph_targets(&self) -> iter::MorphTargets<'a> {
if let Some(slice) = self.json.targets.as_ref() {
iter::MorphTargets {
document: self.mesh.document,
iter: slice.iter(),
}
} else {
iter::MorphTargets {
document: self.mesh.document,
iter: ([]).iter(),
}
}
}
/// Get the material variants.
#[cfg(feature = "KHR_materials_variants")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_materials_variants")))]
pub fn mappings(&self) -> iter::Mappings<'a> {
let iter = self
.json
.extensions
.as_ref()
.and_then(|extensions| extensions.khr_materials_variants.as_ref())
.map(|variants| variants.mappings.iter())
.unwrap_or_else(|| ([]).iter());
iter::Mappings {
document: self.mesh.document,
iter,
}
}
/// Constructs the primitive reader.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub fn reader<'s, F>(&'a self, get_buffer_data: F) -> Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
Reader {
primitive: self,
get_buffer_data,
}
}
}
#[cfg(feature = "utils")]
impl<'a, 's, F> Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
/// Visits the vertex positions of a primitive.
pub fn read_positions(&self) -> Option<util::ReadPositions<'s>> {
self.primitive
.get(&Semantic::Positions)
.and_then(|accessor| accessor::Iter::new(accessor, self.get_buffer_data.clone()))
}
/// Visits the vertex normals of a primitive.
pub fn read_normals(&self) -> Option<util::ReadNormals<'s>> {
self.primitive
.get(&Semantic::Normals)
.and_then(|accessor| accessor::Iter::new(accessor, self.get_buffer_data.clone()))
}
/// Visits the vertex tangents of a primitive.
pub fn read_tangents(&self) -> Option<util::ReadTangents<'s>> {
self.primitive
.get(&Semantic::Tangents)
.and_then(|accessor| accessor::Iter::new(accessor, self.get_buffer_data.clone()))
}
/// Visits the vertex colors of a primitive.
pub fn read_colors(&self, set: u32) -> Option<util::ReadColors<'s>> {
use self::util::ReadColors;
use accessor::DataType::{F32, U16, U8};
use accessor::Dimensions::{Vec3, Vec4};
self.primitive
.get(&Semantic::Colors(set))
.and_then(
|accessor| match (accessor.data_type(), accessor.dimensions()) {
(U8, Vec3) => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadColors::RgbU8),
(U16, Vec3) => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadColors::RgbU16),
(F32, Vec3) => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadColors::RgbF32),
(U8, Vec4) => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadColors::RgbaU8),
(U16, Vec4) => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadColors::RgbaU16),
(F32, Vec4) => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadColors::RgbaF32),
_ => unreachable!(),
},
)
}
/// Visits the vertex draw sequence of a primitive.
pub fn read_indices(&self) -> Option<util::ReadIndices<'s>> {
use self::util::ReadIndices;
use accessor::DataType;
self.primitive
.indices()
.and_then(|accessor| match accessor.data_type() {
DataType::U8 => {
accessor::Iter::new(accessor, self.get_buffer_data.clone()).map(ReadIndices::U8)
}
DataType::U16 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadIndices::U16),
DataType::U32 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadIndices::U32),
_ => unreachable!(),
})
}
/// Visits the joint indices of the primitive.
pub fn read_joints(&self, set: u32) -> Option<util::ReadJoints<'s>> {
use self::util::ReadJoints;
use accessor::DataType;
self.primitive
.get(&Semantic::Joints(set))
.and_then(|accessor| match accessor.data_type() {
DataType::U8 => {
accessor::Iter::new(accessor, self.get_buffer_data.clone()).map(ReadJoints::U8)
}
DataType::U16 => {
accessor::Iter::new(accessor, self.get_buffer_data.clone()).map(ReadJoints::U16)
}
_ => unreachable!(),
})
}
/// Visits the vertex texture co-ordinates of a primitive.
pub fn read_tex_coords(&self, set: u32) -> Option<util::ReadTexCoords<'s>> {
use self::util::ReadTexCoords;
use accessor::DataType;
self.primitive
.get(&Semantic::TexCoords(set))
.and_then(|accessor| match accessor.data_type() {
DataType::U8 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadTexCoords::U8),
DataType::U16 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadTexCoords::U16),
DataType::F32 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadTexCoords::F32),
_ => unreachable!(),
})
}
/// Visits the joint weights of the primitive.
pub fn read_weights(&self, set: u32) -> Option<util::ReadWeights<'s>> {
use self::accessor::DataType;
use self::util::ReadWeights;
self.primitive
.get(&Semantic::Weights(set))
.and_then(|accessor| match accessor.data_type() {
DataType::U8 => {
accessor::Iter::new(accessor, self.get_buffer_data.clone()).map(ReadWeights::U8)
}
DataType::U16 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadWeights::U16),
DataType::F32 => accessor::Iter::new(accessor, self.get_buffer_data.clone())
.map(ReadWeights::F32),
_ => unreachable!(),
})
}
/// Visits the morph targets of the primitive.
pub fn read_morph_targets(&self) -> util::ReadMorphTargets<'a, 's, F> {
util::ReadMorphTargets {
index: 0,
reader: self.clone(),
}
}
}
impl<'a> MorphTarget<'a> {
/// Returns the XYZ vertex position displacements.
pub fn positions(&self) -> Option<Accessor<'a>> {
self.positions.clone()
}
/// Returns the XYZ vertex normal displacements.
pub fn normals(&self) -> Option<Accessor<'a>> {
self.normals.clone()
}
/// Returns the XYZ vertex tangent displacements.
pub fn tangents(&self) -> Option<Accessor<'a>> {
self.tangents.clone()
}
}
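// A companion sketch to the module-level examples above: report each
// primitive's rendering mode and `POSITION` bounds, again using the crate's
// sample asset `examples/Box.gltf`.
fn run() -> Result<(), Box<dyn std::error::Error>> {
    let gltf = gltf::Gltf::open("examples/Box.gltf")?;
    for mesh in gltf.meshes() {
        for primitive in mesh.primitives() {
            let bounds = primitive.bounding_box();
            println!(
                "Mesh #{} / Primitive #{}: mode {:?}, POSITION bounds {:?} .. {:?}",
                mesh.index(),
                primitive.index(),
                primitive.mode(),
                bounds.min,
                bounds.max,
            );
        }
    }
    Ok(())
}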

337
vendor/gltf/src/mesh/util/colors.rs vendored Normal file
View File

@@ -0,0 +1,337 @@
use std::marker::PhantomData;
use crate::Normalize;
use super::ReadColors;
/// Casting iterator for `Colors`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(ReadColors<'a>, PhantomData<T>);
/// Type which describes how to cast any color into RGB u8.
#[derive(Clone, Debug)]
pub struct RgbU8;
/// Type which describes how to cast any color into RGB u16.
#[derive(Clone, Debug)]
pub struct RgbU16;
/// Type which describes how to cast any color into RGB f32.
#[derive(Clone, Debug)]
pub struct RgbF32;
/// Type which describes how to cast any color into RGBA u8.
#[derive(Clone, Debug)]
pub struct RgbaU8;
/// Type which describes how to cast any color into RGBA u16.
#[derive(Clone, Debug)]
pub struct RgbaU16;
/// Type which describes how to cast any color into RGBA f32.
#[derive(Clone, Debug)]
pub struct RgbaF32;
trait ColorChannel {
fn max_color() -> Self;
}
impl ColorChannel for u8 {
fn max_color() -> Self {
u8::max_value()
}
}
impl ColorChannel for u16 {
fn max_color() -> Self {
u16::max_value()
}
}
impl ColorChannel for f32 {
fn max_color() -> Self {
1.0
}
}
trait ColorArray<T> {
fn into_rgb(self) -> [T; 3];
fn into_rgba(self) -> [T; 4];
}
impl<T: Copy + ColorChannel> ColorArray<T> for [T; 3] {
fn into_rgb(self) -> [T; 3] {
self
}
fn into_rgba(self) -> [T; 4] {
[self[0], self[1], self[2], T::max_color()]
}
}
impl<T: Copy + ColorChannel> ColorArray<T> for [T; 4] {
fn into_rgb(self) -> [T; 3] {
[self[0], self[1], self[2]]
}
fn into_rgba(self) -> [T; 4] {
self
}
}
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from RGB u8.
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output;
/// Cast from RGB u16.
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output;
/// Cast from RGB f32.
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output;
/// Cast from RGBA u8.
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output;
/// Cast from RGBA u16.
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output;
/// Cast from RGBA f32.
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: ReadColors<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `ReadColors` object.
pub fn unwrap(self) -> ReadColors<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
ReadColors::RgbU8(ref mut i) => i.next().map(A::cast_rgb_u8),
ReadColors::RgbU16(ref mut i) => i.next().map(A::cast_rgb_u16),
ReadColors::RgbF32(ref mut i) => i.next().map(A::cast_rgb_f32),
ReadColors::RgbaU8(ref mut i) => i.next().map(A::cast_rgba_u8),
ReadColors::RgbaU16(ref mut i) => i.next().map(A::cast_rgba_u16),
ReadColors::RgbaF32(ref mut i) => i.next().map(A::cast_rgba_f32),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
ReadColors::RgbU8(ref mut i) => i.nth(x).map(A::cast_rgb_u8),
ReadColors::RgbU16(ref mut i) => i.nth(x).map(A::cast_rgb_u16),
ReadColors::RgbF32(ref mut i) => i.nth(x).map(A::cast_rgb_f32),
ReadColors::RgbaU8(ref mut i) => i.nth(x).map(A::cast_rgba_u8),
ReadColors::RgbaU16(ref mut i) => i.nth(x).map(A::cast_rgba_u16),
ReadColors::RgbaF32(ref mut i) => i.nth(x).map(A::cast_rgba_f32),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
ReadColors::RgbU8(i) => i.last().map(A::cast_rgb_u8),
ReadColors::RgbU16(i) => i.last().map(A::cast_rgb_u16),
ReadColors::RgbF32(i) => i.last().map(A::cast_rgb_f32),
ReadColors::RgbaU8(i) => i.last().map(A::cast_rgba_u8),
ReadColors::RgbaU16(i) => i.last().map(A::cast_rgba_u16),
ReadColors::RgbaF32(i) => i.last().map(A::cast_rgba_f32),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
ReadColors::RgbU8(ref i) => i.size_hint(),
ReadColors::RgbU16(ref i) => i.size_hint(),
ReadColors::RgbF32(ref i) => i.size_hint(),
ReadColors::RgbaU8(ref i) => i.size_hint(),
ReadColors::RgbaU16(ref i) => i.size_hint(),
ReadColors::RgbaF32(ref i) => i.size_hint(),
}
}
}
impl Cast for RgbU8 {
type Output = [u8; 3];
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output {
x.into_rgb().normalize()
}
}
impl Cast for RgbU16 {
type Output = [u16; 3];
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output {
x.into_rgb().normalize()
}
}
impl Cast for RgbF32 {
type Output = [f32; 3];
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output {
x.into_rgb().normalize()
}
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output {
x.into_rgb().normalize()
}
}
impl Cast for RgbaU8 {
type Output = [u8; 4];
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output {
x.normalize().into_rgba()
}
}
impl Cast for RgbaU16 {
type Output = [u16; 4];
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output {
x.normalize().into_rgba()
}
}
impl Cast for RgbaF32 {
type Output = [f32; 4];
fn cast_rgb_u8(x: [u8; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgb_u16(x: [u16; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgb_f32(x: [f32; 3]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_u8(x: [u8; 4]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_u16(x: [u16; 4]) -> Self::Output {
x.normalize().into_rgba()
}
fn cast_rgba_f32(x: [f32; 4]) -> Self::Output {
x.normalize().into_rgba()
}
}
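// Sketch of the casting adapters above: whatever the stored layout (RGB or
// RGBA; u8, u16 or f32), `into_rgba_f32` yields `[f32; 4]` with a default
// alpha of 1.0. Buffer data is assumed to come from `gltf::import`.
fn print_colors(primitive: gltf::mesh::Primitive, buffers: &[gltf::buffer::Data]) {
    let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
    if let Some(colors) = reader.read_colors(0) {
        for [r, g, b, a] in colors.into_rgba_f32() {
            println!("COLOR_0 = ({}, {}, {}, {})", r, g, b, a);
        }
    }
}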

95
vendor/gltf/src/mesh/util/indices.rs vendored Normal file
View File

@@ -0,0 +1,95 @@
use std::marker::PhantomData;
use super::ReadIndices;
/// Casting iterator for `Indices`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(ReadIndices<'a>, PhantomData<T>);
/// Type which describes how to cast any index into u32.
#[derive(Clone, Debug)]
pub struct U32;
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from u8.
fn cast_u8(x: u8) -> Self::Output;
/// Cast from u16.
fn cast_u16(x: u16) -> Self::Output;
/// Cast from u32.
fn cast_u32(x: u32) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: ReadIndices<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `ReadIndices` object.
pub fn unwrap(self) -> ReadIndices<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
ReadIndices::U8(ref mut i) => i.next().map(A::cast_u8),
ReadIndices::U16(ref mut i) => i.next().map(A::cast_u16),
ReadIndices::U32(ref mut i) => i.next().map(A::cast_u32),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
ReadIndices::U8(ref mut i) => i.nth(x).map(A::cast_u8),
ReadIndices::U16(ref mut i) => i.nth(x).map(A::cast_u16),
ReadIndices::U32(ref mut i) => i.nth(x).map(A::cast_u32),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
ReadIndices::U8(i) => i.last().map(A::cast_u8),
ReadIndices::U16(i) => i.last().map(A::cast_u16),
ReadIndices::U32(i) => i.last().map(A::cast_u32),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
ReadIndices::U8(ref i) => i.size_hint(),
ReadIndices::U16(ref i) => i.size_hint(),
ReadIndices::U32(ref i) => i.size_hint(),
}
}
}
impl Cast for U32 {
type Output = u32;
fn cast_u8(x: u8) -> Self::Output {
x as Self::Output
}
fn cast_u16(x: u16) -> Self::Output {
x as Self::Output
}
fn cast_u32(x: u32) -> Self::Output {
x
}
}
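// The `U32` caster lets index data of any width be consumed uniformly. A
// sketch that groups indices into triangles, assuming an indexed primitive in
// `Mode::Triangles` and buffer data obtained from `gltf::import`.
fn collect_triangles(
    primitive: gltf::mesh::Primitive,
    buffers: &[gltf::buffer::Data],
) -> Vec<[u32; 3]> {
    let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
    let mut triangles = Vec::new();
    if let Some(indices) = reader.read_indices() {
        let indices: Vec<u32> = indices.into_u32().collect();
        for tri in indices.chunks_exact(3) {
            triangles.push([tri[0], tri[1], tri[2]]);
        }
    }
    triangles
}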

86
vendor/gltf/src/mesh/util/joints.rs vendored Normal file
View File

@@ -0,0 +1,86 @@
use std::marker::PhantomData;
use super::ReadJoints;
/// Casting iterator for `Joints`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(ReadJoints<'a>, PhantomData<T>);
/// Type which describes how to cast any joint into u16.
#[derive(Clone, Debug)]
pub struct U16;
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from u8.
fn cast_u8(x: [u8; 4]) -> Self::Output;
/// Cast from u16.
fn cast_u16(x: [u16; 4]) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: ReadJoints<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `ReadJoints` object.
pub fn unwrap(self) -> ReadJoints<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
ReadJoints::U8(ref mut i) => i.next().map(A::cast_u8),
ReadJoints::U16(ref mut i) => i.next().map(A::cast_u16),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
ReadJoints::U8(ref mut i) => i.nth(x).map(A::cast_u8),
ReadJoints::U16(ref mut i) => i.nth(x).map(A::cast_u16),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
ReadJoints::U8(i) => i.last().map(A::cast_u8),
ReadJoints::U16(i) => i.last().map(A::cast_u16),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
ReadJoints::U8(ref i) => i.size_hint(),
ReadJoints::U16(ref i) => i.size_hint(),
}
}
}
impl Cast for U16 {
type Output = [u16; 4];
fn cast_u8(x: [u8; 4]) -> Self::Output {
[x[0] as u16, x[1] as u16, x[2] as u16, x[3] as u16]
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x
}
}
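// Skinning data is usually consumed in pairs: a sketch that widens `JOINTS_0`
// to `[u16; 4]` and zips it with `WEIGHTS_0` read as `f32`, assuming both
// attribute sets are present and buffer data comes from `gltf::import`.
fn print_skinning(primitive: gltf::mesh::Primitive, buffers: &[gltf::buffer::Data]) {
    let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
    if let (Some(joints), Some(weights)) = (reader.read_joints(0), reader.read_weights(0)) {
        for (joint, weight) in joints.into_u16().zip(weights.into_f32()) {
            // Four joint indices and the corresponding influence weights for one vertex.
            println!("joints {:?} -> weights {:?}", joint, weight);
        }
    }
}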

242
vendor/gltf/src/mesh/util/mod.rs vendored Normal file
View File

@@ -0,0 +1,242 @@
/// Casting iterator adapters for colors.
pub mod colors;
/// Casting iterator adapters for vertex indices.
pub mod indices;
/// Casting iterator adapters for joint indices.
pub mod joints;
/// Casting iterator adapters for texture co-ordinates.
pub mod tex_coords;
/// Casting iterator adapters for node weights.
pub mod weights;
use crate::mesh;
use crate::accessor::Iter;
use crate::Buffer;
/// XYZ vertex positions of type `[f32; 3]`.
pub type ReadPositions<'a> = Iter<'a, [f32; 3]>;
/// XYZ vertex normals of type `[f32; 3]`.
pub type ReadNormals<'a> = Iter<'a, [f32; 3]>;
/// XYZW vertex tangents of type `[f32; 4]` where the `w` component is a
/// sign value (-1 or +1) indicating the handedness of the tangent basis.
pub type ReadTangents<'a> = Iter<'a, [f32; 4]>;
/// XYZ vertex position displacements of type `[f32; 3]`.
pub type ReadPositionDisplacements<'a> = Iter<'a, [f32; 3]>;
/// XYZ vertex normal displacements of type `[f32; 3]`.
pub type ReadNormalDisplacements<'a> = Iter<'a, [f32; 3]>;
/// XYZ vertex tangent displacements of type `[f32; 3]`.
pub type ReadTangentDisplacements<'a> = Iter<'a, [f32; 3]>;
/// Vertex colors.
#[derive(Clone, Debug)]
pub enum ReadColors<'a> {
/// RGB vertex color of type `[u8; 3]`.
RgbU8(Iter<'a, [u8; 3]>),
/// RGB vertex color of type `[u16; 3]`.
RgbU16(Iter<'a, [u16; 3]>),
/// RGB vertex color of type `[f32; 3]`.
RgbF32(Iter<'a, [f32; 3]>),
/// RGBA vertex color of type `[u8; 4]`.
RgbaU8(Iter<'a, [u8; 4]>),
/// RGBA vertex color of type `[u16; 4]`.
RgbaU16(Iter<'a, [u16; 4]>),
/// RGBA vertex color of type `[f32; 4]`.
RgbaF32(Iter<'a, [f32; 4]>),
}
/// Index data.
#[derive(Clone, Debug)]
pub enum ReadIndices<'a> {
/// Index data of type U8
U8(Iter<'a, u8>),
/// Index data of type U16
U16(Iter<'a, u16>),
/// Index data of type U32
U32(Iter<'a, u32>),
}
/// Vertex joints.
#[derive(Clone, Debug)]
pub enum ReadJoints<'a> {
/// Joints of type `[u8; 4]`.
/// Refer to the documentation on morph targets and skins for more
/// information.
U8(Iter<'a, [u8; 4]>),
/// Joints of type `[u16; 4]`.
/// Refer to the documentation on morph targets and skins for more
/// information.
U16(Iter<'a, [u16; 4]>),
}
/// UV texture co-ordinates.
#[derive(Clone, Debug)]
pub enum ReadTexCoords<'a> {
/// UV texture co-ordinates of type `[u8; 2]`.
U8(Iter<'a, [u8; 2]>),
/// UV texture co-ordinates of type `[u16; 2]`.
U16(Iter<'a, [u16; 2]>),
/// UV texture co-ordinates of type `[f32; 2]`.
F32(Iter<'a, [f32; 2]>),
}
/// Weights.
#[derive(Clone, Debug)]
pub enum ReadWeights<'a> {
/// Weights of type `[u8; 4]`.
U8(Iter<'a, [u8; 4]>),
/// Weights of type `[u16; 4]`.
U16(Iter<'a, [u16; 4]>),
/// Weights of type `[f32; 4]`.
F32(Iter<'a, [f32; 4]>),
}
/// Morph targets.
#[derive(Clone, Debug)]
pub struct ReadMorphTargets<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
pub(crate) index: usize,
pub(crate) reader: mesh::Reader<'a, 's, F>,
}
impl<'a, 's, F> ExactSizeIterator for ReadMorphTargets<'a, 's, F> where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>
{
}
impl<'a, 's, F> Iterator for ReadMorphTargets<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
type Item = (
Option<ReadPositionDisplacements<'s>>,
Option<ReadNormalDisplacements<'s>>,
Option<ReadTangentDisplacements<'s>>,
);
fn next(&mut self) -> Option<Self::Item> {
self.index += 1;
self.reader
.primitive
.morph_targets()
.nth(self.index - 1)
.map(|morph_target| {
let positions = morph_target
.positions()
.and_then(|accessor| Iter::new(accessor, self.reader.get_buffer_data.clone()));
let normals = morph_target
.normals()
.and_then(|accessor| Iter::new(accessor, self.reader.get_buffer_data.clone()));
let tangents = morph_target
.tangents()
.and_then(|accessor| Iter::new(accessor, self.reader.get_buffer_data.clone()));
(positions, normals, tangents)
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.reader.primitive.morph_targets().size_hint()
}
}
impl<'a> ReadColors<'a> {
/// Reinterpret colors as RGB u8, discarding alpha, if present. Lossy if
/// the underlying iterator yields u16, f32 or any RGBA.
pub fn into_rgb_u8(self) -> self::colors::CastingIter<'a, self::colors::RgbU8> {
self::colors::CastingIter::new(self)
}
/// Reinterpret colors as RGB u16, discarding alpha, if present. Lossy if
/// the underlying iterator yields f32 or any RGBA.
pub fn into_rgb_u16(self) -> self::colors::CastingIter<'a, self::colors::RgbU16> {
self::colors::CastingIter::new(self)
}
/// Reinterpret colors as RGB f32, discarding alpha, if present. Lossy if
/// the underlying iterator yields u16 or any RGBA.
pub fn into_rgb_f32(self) -> self::colors::CastingIter<'a, self::colors::RgbF32> {
self::colors::CastingIter::new(self)
}
/// Reinterpret colors as RGBA u8, with default alpha 255. Lossy if the
/// underlying iterator yields u16 or f32.
pub fn into_rgba_u8(self) -> self::colors::CastingIter<'a, self::colors::RgbaU8> {
self::colors::CastingIter::new(self)
}
/// Reinterpret colors as RGBA u16, with default alpha 65535. Lossy if the
/// underlying iterator yields f32.
pub fn into_rgba_u16(self) -> self::colors::CastingIter<'a, self::colors::RgbaU16> {
self::colors::CastingIter::new(self)
}
/// Reinterpret colors as RGBA f32, with default alpha 1.0. Lossy if the
/// underlying iterator yields u16.
pub fn into_rgba_f32(self) -> self::colors::CastingIter<'a, self::colors::RgbaF32> {
self::colors::CastingIter::new(self)
}
}
impl<'a> ReadIndices<'a> {
/// Reinterpret indices as u32, which can fit any possible index.
pub fn into_u32(self) -> self::indices::CastingIter<'a, self::indices::U32> {
self::indices::CastingIter::new(self)
}
}
impl<'a> ReadJoints<'a> {
/// Reinterpret joints as u16, which can fit any possible joint.
pub fn into_u16(self) -> self::joints::CastingIter<'a, self::joints::U16> {
self::joints::CastingIter::new(self)
}
}
impl<'a> ReadTexCoords<'a> {
/// Reinterpret texture coordinates as u8. Lossy if the underlying iterator
/// yields u16 or f32.
pub fn into_u8(self) -> self::tex_coords::CastingIter<'a, self::tex_coords::U8> {
self::tex_coords::CastingIter::new(self)
}
/// Reinterpret texture coordinates as u16. Lossy if the underlying
/// iterator yields f32.
pub fn into_u16(self) -> self::tex_coords::CastingIter<'a, self::tex_coords::U16> {
self::tex_coords::CastingIter::new(self)
}
/// Reinterpret texture coordinates as f32. Lossy if the underlying
/// iterator yields u16.
pub fn into_f32(self) -> self::tex_coords::CastingIter<'a, self::tex_coords::F32> {
self::tex_coords::CastingIter::new(self)
}
}
impl<'a> ReadWeights<'a> {
/// Reinterpret weights as u8. Lossy if the underlying iterator yields u16
/// or f32.
pub fn into_u8(self) -> self::weights::CastingIter<'a, self::weights::U8> {
self::weights::CastingIter::new(self)
}
/// Reinterpret weights as u16. Lossy if the underlying iterator yields
/// f32.
pub fn into_u16(self) -> self::weights::CastingIter<'a, self::weights::U16> {
self::weights::CastingIter::new(self)
}
/// Reinterpret weights as f32. Lossy if the underlying iterator yields
/// u16.
pub fn into_f32(self) -> self::weights::CastingIter<'a, self::weights::F32> {
self::weights::CastingIter::new(self)
}
}
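// `ReadMorphTargets` yields one (positions, normals, tangents) triple of
// optional displacement iterators per morph target. A sketch that counts the
// displacements per target, assuming the sample asset layout used elsewhere
// in this crate's examples.
fn run() -> Result<(), Box<dyn std::error::Error>> {
    let (gltf, buffers, _) = gltf::import("examples/Box.gltf")?;
    for mesh in gltf.meshes() {
        for primitive in mesh.primitives() {
            let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
            for (i, (positions, normals, tangents)) in reader.read_morph_targets().enumerate() {
                println!(
                    "morph target #{}: {} position, {} normal, {} tangent displacements",
                    i,
                    positions.map_or(0, |p| p.count()),
                    normals.map_or(0, |n| n.count()),
                    tangents.map_or(0, |t| t.count()),
                );
            }
        }
    }
    Ok(())
}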

139
vendor/gltf/src/mesh/util/tex_coords.rs vendored Normal file
View File

@@ -0,0 +1,139 @@
use std::marker::PhantomData;
use crate::Normalize;
use super::ReadTexCoords;
/// Casting iterator for `TexCoords`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(ReadTexCoords<'a>, PhantomData<T>);
/// Type which describes how to cast any texture coordinate into a pair of u8.
#[derive(Clone, Debug)]
pub struct U8;
/// Type which describes how to cast any texture coordinate into a pair of u16.
#[derive(Clone, Debug)]
pub struct U16;
/// Type which describes how to cast any texture coordinate into a pair of f32.
#[derive(Clone, Debug)]
pub struct F32;
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from u8 pair.
fn cast_u8(x: [u8; 2]) -> Self::Output;
/// Cast from u16 pair.
fn cast_u16(x: [u16; 2]) -> Self::Output;
/// Cast from f32 pair.
fn cast_f32(x: [f32; 2]) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: ReadTexCoords<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `ReadTexCoords` object.
pub fn unwrap(self) -> ReadTexCoords<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
ReadTexCoords::U8(ref mut i) => i.next().map(A::cast_u8),
ReadTexCoords::U16(ref mut i) => i.next().map(A::cast_u16),
ReadTexCoords::F32(ref mut i) => i.next().map(A::cast_f32),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
ReadTexCoords::U8(ref mut i) => i.nth(x).map(A::cast_u8),
ReadTexCoords::U16(ref mut i) => i.nth(x).map(A::cast_u16),
ReadTexCoords::F32(ref mut i) => i.nth(x).map(A::cast_f32),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
ReadTexCoords::U8(i) => i.last().map(A::cast_u8),
ReadTexCoords::U16(i) => i.last().map(A::cast_u16),
ReadTexCoords::F32(i) => i.last().map(A::cast_f32),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
ReadTexCoords::U8(ref i) => i.size_hint(),
ReadTexCoords::U16(ref i) => i.size_hint(),
ReadTexCoords::F32(ref i) => i.size_hint(),
}
}
}
impl Cast for U8 {
type Output = [u8; 2];
fn cast_u8(x: [u8; 2]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 2]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 2]) -> Self::Output {
x.normalize()
}
}
impl Cast for U16 {
type Output = [u16; 2];
fn cast_u8(x: [u8; 2]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 2]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 2]) -> Self::Output {
x.normalize()
}
}
impl Cast for F32 {
type Output = [f32; 2];
fn cast_u8(x: [u8; 2]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 2]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 2]) -> Self::Output {
x.normalize()
}
}
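// Texture coordinates stored as normalized u8 or u16 can be consumed in the
// 0.0..=1.0 range via the `F32` caster. A short sketch, assuming `TEXCOORD_0`
// exists and buffer data comes from `gltf::import`.
fn print_uvs(primitive: gltf::mesh::Primitive, buffers: &[gltf::buffer::Data]) {
    let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
    if let Some(tex_coords) = reader.read_tex_coords(0) {
        for [u, v] in tex_coords.into_f32() {
            println!("TEXCOORD_0 = ({}, {})", u, v);
        }
    }
}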

139
vendor/gltf/src/mesh/util/weights.rs vendored Normal file
View File

@@ -0,0 +1,139 @@
use std::marker::PhantomData;
use crate::Normalize;
use super::ReadWeights;
/// Casting iterator for `Weights`.
#[derive(Clone, Debug)]
pub struct CastingIter<'a, T>(ReadWeights<'a>, PhantomData<T>);
/// Type which describes how to cast any weight into u8.
#[derive(Clone, Debug)]
pub struct U8;
/// Type which describes how to cast any weight into u16.
#[derive(Clone, Debug)]
pub struct U16;
/// Type which describes how to cast any weight into f32.
#[derive(Clone, Debug)]
pub struct F32;
/// Trait for types which describe casting behaviour.
pub trait Cast {
/// Output type.
type Output;
/// Cast from u8.
fn cast_u8(x: [u8; 4]) -> Self::Output;
/// Cast from u16.
fn cast_u16(x: [u16; 4]) -> Self::Output;
/// Cast from f32.
fn cast_f32(x: [f32; 4]) -> Self::Output;
}
impl<'a, A> CastingIter<'a, A> {
pub(crate) fn new(iter: ReadWeights<'a>) -> Self {
CastingIter(iter, PhantomData)
}
/// Unwrap underlying `ReadWeights` object.
pub fn unwrap(self) -> ReadWeights<'a> {
self.0
}
}
impl<'a, A: Cast> ExactSizeIterator for CastingIter<'a, A> {}
impl<'a, A: Cast> Iterator for CastingIter<'a, A> {
type Item = A::Output;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
match self.0 {
ReadWeights::U8(ref mut i) => i.next().map(A::cast_u8),
ReadWeights::U16(ref mut i) => i.next().map(A::cast_u16),
ReadWeights::F32(ref mut i) => i.next().map(A::cast_f32),
}
}
#[inline]
fn nth(&mut self, x: usize) -> Option<Self::Item> {
match self.0 {
ReadWeights::U8(ref mut i) => i.nth(x).map(A::cast_u8),
ReadWeights::U16(ref mut i) => i.nth(x).map(A::cast_u16),
ReadWeights::F32(ref mut i) => i.nth(x).map(A::cast_f32),
}
}
fn last(self) -> Option<Self::Item> {
match self.0 {
ReadWeights::U8(i) => i.last().map(A::cast_u8),
ReadWeights::U16(i) => i.last().map(A::cast_u16),
ReadWeights::F32(i) => i.last().map(A::cast_f32),
}
}
fn count(self) -> usize {
self.size_hint().0
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
match self.0 {
ReadWeights::U8(ref i) => i.size_hint(),
ReadWeights::U16(ref i) => i.size_hint(),
ReadWeights::F32(ref i) => i.size_hint(),
}
}
}
impl Cast for U8 {
type Output = [u8; 4];
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
impl Cast for U16 {
type Output = [u16; 4];
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
impl Cast for F32 {
type Output = [f32; 4];
fn cast_u8(x: [u8; 4]) -> Self::Output {
x.normalize()
}
fn cast_u16(x: [u16; 4]) -> Self::Output {
x.normalize()
}
fn cast_f32(x: [f32; 4]) -> Self::Output {
x.normalize()
}
}
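// Weights read through the `F32` caster are normalized, so per the glTF
// specification each vertex's four weights should sum to roughly 1.0. A
// spot-check sketch, assuming buffer data from `gltf::import`; the tolerance
// is an arbitrary choice.
fn weights_roughly_normalized(
    primitive: gltf::mesh::Primitive,
    buffers: &[gltf::buffer::Data],
) -> bool {
    let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));
    match reader.read_weights(0) {
        Some(weights) => weights
            .into_f32()
            .all(|w| (w.iter().sum::<f32>() - 1.0).abs() < 1.0e-2),
        None => true, // no WEIGHTS_0 attribute on this primitive
    }
}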

64
vendor/gltf/src/scene/iter.rs vendored Normal file
View File

@@ -0,0 +1,64 @@
use std::slice;
use crate::{Document, Node};
/// An `Iterator` that visits the nodes in a scene.
#[derive(Clone, Debug)]
pub struct Nodes<'a> {
/// The parent `Document` struct.
pub(crate) document: &'a Document,
/// The internal node index iterator.
pub(crate) iter: slice::Iter<'a, json::Index<json::scene::Node>>,
}
/// An `Iterator` that visits the children of a node.
#[derive(Clone, Debug)]
pub struct Children<'a> {
/// The parent `Document` struct.
pub(crate) document: &'a Document,
/// The internal node index iterator.
pub(crate) iter: slice::Iter<'a, json::Index<json::scene::Node>>,
}
impl<'a> ExactSizeIterator for Nodes<'a> {}
impl<'a> Iterator for Nodes<'a> {
type Item = Node<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|index| self.document.nodes().nth(index.value()).unwrap())
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> ExactSizeIterator for Children<'a> {}
impl<'a> Iterator for Children<'a> {
type Item = Node<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|index| self.document.nodes().nth(index.value()).unwrap())
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|index| document.nodes().nth(index.value()).unwrap())
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|index| self.document.nodes().nth(index.value()).unwrap())
}
}
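// A sketch of walking a scene graph with these iterators: `Scene::nodes()`
// yields the roots and `Node::children()` the rest, so a simple recursion
// prints the hierarchy. Assumes `Document::scenes()` from the crate root and
// the sample asset `examples/Box.gltf`.
fn visit(node: gltf::scene::Node, depth: usize) {
    println!("{}Node #{}", "  ".repeat(depth), node.index());
    for child in node.children() {
        visit(child, depth + 1);
    }
}

fn run() -> Result<(), Box<dyn std::error::Error>> {
    let gltf = gltf::Gltf::open("examples/Box.gltf")?;
    for scene in gltf.scenes() {
        println!("Scene #{}", scene.index());
        for node in scene.nodes() {
            visit(node, 1);
        }
    }
    Ok(())
}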

629
vendor/gltf/src/scene/mod.rs vendored Normal file
View File

@@ -0,0 +1,629 @@
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
use crate::math::*;
use crate::{Camera, Document, Mesh, Skin};
/// Iterators.
pub mod iter;
/// The transform for a `Node`.
#[derive(Clone, Debug)]
pub enum Transform {
/// 4x4 transformation matrix in column-major order.
Matrix {
/// 4x4 matrix.
matrix: [[f32; 4]; 4],
},
/// Decomposed TRS properties.
Decomposed {
/// `[x, y, z]` vector.
translation: [f32; 3],
/// `[x, y, z, w]` quaternion, where `w` is the scalar.
rotation: [f32; 4],
/// `[x, y, z]` vector.
scale: [f32; 3],
},
}
impl Transform {
/// Returns the matrix representation of this transform.
///
/// If the transform is `Decomposed`, then the matrix is generated with the
/// equation `matrix = translation * rotation * scale`.
pub fn matrix(self) -> [[f32; 4]; 4] {
match self {
Transform::Matrix { matrix } => matrix,
Transform::Decomposed {
translation: t,
rotation: r,
scale: s,
} => {
let t = Matrix4::from_translation(Vector3::new(t[0], t[1], t[2]));
let r = Matrix4::from_quaternion(Quaternion::new(r[3], r[0], r[1], r[2]));
let s = Matrix4::from_nonuniform_scale(s[0], s[1], s[2]);
(t * r * s).as_array()
}
}
}
/// Returns a decomposed representation of this transform.
///
/// If the transform is `Matrix`, then the decomposition is extracted from the
/// matrix.
pub fn decomposed(self) -> ([f32; 3], [f32; 4], [f32; 3]) {
match self {
Transform::Matrix { matrix: m } => {
let translation = [m[3][0], m[3][1], m[3][2]];
#[rustfmt::skip]
let mut i = Matrix3::new(
m[0][0], m[0][1], m[0][2],
m[1][0], m[1][1], m[1][2],
m[2][0], m[2][1], m[2][2],
);
let sx = i.x.magnitude();
let sy = i.y.magnitude();
let sz = i.determinant().signum() * i.z.magnitude();
let scale = [sx, sy, sz];
i.x.multiply(1.0 / sx);
i.y.multiply(1.0 / sy);
i.z.multiply(1.0 / sz);
let r = Quaternion::from_matrix(i);
let rotation = [r.v.x, r.v.y, r.v.z, r.s];
(translation, rotation, scale)
}
Transform::Decomposed {
translation,
rotation,
scale,
} => (translation, rotation, scale),
}
}
}
/// A node in the node hierarchy.
///
/// When a node contains a skin, all its meshes contain `JOINTS_0` and `WEIGHTS_0`
/// attributes.
#[derive(Clone, Debug)]
pub struct Node<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::scene::Node,
}
/// The root nodes of a scene.
#[derive(Clone, Debug)]
pub struct Scene<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::scene::Scene,
}
impl<'a> Node<'a> {
/// Constructs a `Node`.
pub(crate) fn new(document: &'a Document, index: usize, json: &'a json::scene::Node) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns the camera referenced by this node.
pub fn camera(&self) -> Option<Camera<'a>> {
self.json
.camera
.as_ref()
.map(|index| self.document.cameras().nth(index.value()).unwrap())
}
/// Returns an `Iterator` that visits the node's children.
pub fn children(&self) -> iter::Children<'a> {
iter::Children {
document: self.document,
iter: self.json.children.as_ref().map_or([].iter(), |x| x.iter()),
}
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Returns the light at this node as defined by the `KHR_lights_punctual` extension.
#[cfg(feature = "KHR_lights_punctual")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_lights_punctual")))]
pub fn light(&self) -> Option<crate::khr_lights_punctual::Light<'a>> {
if let Some(extensions) = self.json.extensions.as_ref() {
if let Some(khr_lights_punctual) = extensions.khr_lights_punctual.as_ref() {
let mut lights = self.document.lights().unwrap();
Some(lights.nth(khr_lights_punctual.light.value()).unwrap())
} else {
None
}
} else {
None
}
}
/// Returns the mesh referenced by this node.
pub fn mesh(&self) -> Option<Mesh<'a>> {
self.json
.mesh
.as_ref()
.map(|index| self.document.meshes().nth(index.value()).unwrap())
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns the node's transform.
pub fn transform(&self) -> Transform {
if let Some(m) = self.json.matrix {
Transform::Matrix {
matrix: [
[m[0], m[1], m[2], m[3]],
[m[4], m[5], m[6], m[7]],
[m[8], m[9], m[10], m[11]],
[m[12], m[13], m[14], m[15]],
],
}
} else {
Transform::Decomposed {
translation: self.json.translation.unwrap_or([0.0, 0.0, 0.0]),
rotation: self.json.rotation.unwrap_or_default().0,
scale: self.json.scale.unwrap_or([1.0, 1.0, 1.0]),
}
}
}
/// Returns the skin referenced by this node.
pub fn skin(&self) -> Option<Skin<'a>> {
self.json
.skin
.as_ref()
.map(|index| self.document.skins().nth(index.value()).unwrap())
}
/// Returns the weights of the instantiated morph target.
pub fn weights(&self) -> Option<&'a [f32]> {
self.json.weights.as_deref()
}
}
impl<'a> Scene<'a> {
/// Constructs a `Scene`.
pub(crate) fn new(document: &'a Document, index: usize, json: &'a json::scene::Scene) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns an `Iterator` that visits each root node of the scene.
pub fn nodes(&self) -> iter::Nodes<'a> {
iter::Nodes {
document: self.document,
iter: self.json.nodes.iter(),
}
}
}
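// Usage sketch (not part of the crate): `Scene::nodes` yields only the root
// nodes of the scene graph; descendants are reached through `Node::children`.
fn print_scene_roots(document: &gltf::Document) {
    for scene in document.scenes() {
        for root in scene.nodes() {
            println!("scene #{}: root node #{}", scene.index(), root.index());
        }
    }
}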
#[cfg(test)]
mod tests {
use crate::math::*;
use crate::scene::Transform;
use std::f32::consts::PI;
fn rotate(x: f32, y: f32, z: f32, r: f32) -> [f32; 4] {
let r = Quaternion::from_axis_angle(Vector3::new(x, y, z).normalize(), r);
[r.v.x, r.v.y, r.v.z, r.s]
}
fn test_decompose(translation: [f32; 3], rotation: [f32; 4], scale: [f32; 3]) {
let matrix = Transform::Decomposed {
translation,
rotation,
scale,
}
.matrix();
let (translation, rotation, scale) = Transform::Matrix { matrix }.decomposed();
let check = Transform::Decomposed {
translation,
rotation,
scale,
}
.matrix();
assert_relative_eq!(
Matrix4::from_array(check),
Matrix4::from_array(matrix),
epsilon = 0.05
);
}
fn test_decompose_rotation(rotation: [f32; 4]) {
let translation = [1.0, -2.0, 3.0];
let scale = [1.0, 1.0, 1.0];
test_decompose(translation, rotation, scale);
}
fn test_decompose_scale(scale: [f32; 3]) {
let translation = [1.0, 2.0, 3.0];
let rotation = rotate(1.0, 0.0, 0.0, PI / 2.0);
test_decompose(translation, rotation, scale);
}
fn test_decompose_translation(translation: [f32; 3]) {
let rotation = [0.0, 0.0, 0.0, 1.0];
let scale = [1.0, 1.0, 1.0];
test_decompose(translation, rotation, scale);
}
#[test]
fn decompose_identity() {
let translation = [0.0, 0.0, 0.0];
let rotation = [0.0, 0.0, 0.0, 1.0];
let scale = [1.0, 1.0, 1.0];
test_decompose(translation, rotation, scale);
}
#[test]
fn decompose_translation_unit_x() {
let translation = [1.0, 0.0, 0.0];
test_decompose_translation(translation);
}
#[test]
fn decompose_translation_unit_y() {
let translation = [0.0, 1.0, 0.0];
test_decompose_translation(translation);
}
#[test]
fn decompose_translation_unit_z() {
let translation = [0.0, 0.0, 1.0];
test_decompose_translation(translation);
}
#[test]
fn decompose_translation_random0() {
let translation = [1.0, -1.0, 1.0];
test_decompose_translation(translation);
}
#[test]
fn decompose_translation_random1() {
let translation = [-1.0, -1.0, -1.0];
test_decompose_translation(translation);
}
#[test]
fn decompose_translation_random2() {
let translation = [-10.0, 100000.0, -0.0001];
test_decompose_translation(translation);
}
#[test]
fn decompose_rotation_xaxis() {
let rotation = rotate(1.0, 0.0, 0.0, PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_yaxis() {
let rotation = rotate(0.0, 1.0, 0.0, PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_zaxis() {
let rotation = rotate(0.0, 0.0, 1.0, PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_negative_xaxis() {
let rotation = rotate(-1.0, 0.0, 0.0, PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_negative_yaxis() {
let rotation = rotate(0.0, -1.0, 0.0, PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_negative_zaxis() {
let rotation = rotate(0.0, 0.0, -1.0, PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_eighth_turn() {
let rotation = rotate(1.0, 0.0, 0.0, PI / 4.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_negative_quarter_turn() {
let rotation = rotate(0.0, 1.0, 0.0, -PI / 2.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_half_turn() {
let rotation = rotate(0.0, 0.0, 1.0, PI);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_zero_turn_xaxis() {
let rotation = rotate(1.0, 0.0, 0.0, 0.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_zero_turn_yaxis() {
let rotation = rotate(0.0, 1.0, 0.0, 0.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_zero_turn_zaxis() {
let rotation = rotate(0.0, 0.0, 1.0, 0.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_full_turn() {
let rotation = rotate(1.0, 0.0, 0.0, 2.0 * PI);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_random0() {
let rotation = rotate(1.0, 1.0, 1.0, PI / 3.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_rotation_random1() {
let rotation = rotate(1.0, -1.0, 1.0, -PI / 6.0);
test_decompose_rotation(rotation);
}
#[test]
fn decompose_uniform_scale_up() {
let scale = [100.0, 100.0, 100.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_uniform_scale_down() {
let scale = [0.01, 0.01, 0.01];
test_decompose_scale(scale);
}
#[test]
fn decompose_xscale_up() {
let scale = [100.0, 1.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_xscale_down() {
let scale = [0.001, 1.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_yscale_up() {
let scale = [1.0, 100.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_yscale_down() {
let scale = [1.0, 0.001, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_zscale_up() {
let scale = [1.0, 1.0, 100.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_zscale_down() {
let scale = [1.0, 1.0, 0.001];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_xscale_unit() {
let scale = [-1.0, 1.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_xscale_up() {
let scale = [-10.0, 1.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_xscale_down() {
let scale = [-0.1, 1.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_yscale_unit() {
let scale = [1.0, -1.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_yscale_up() {
let scale = [1.0, -10.0, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_yscale_down() {
let scale = [1.0, -0.1, 1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_zscale_unit() {
let scale = [1.0, 1.0, -1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_zscale_up() {
let scale = [1.0, 1.0, -10.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_negative_zscale_down() {
let scale = [1.0, 1.0, -0.1];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_up_sml() {
let scale = [10.0, 100.0, 1000.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_up_mls() {
let scale = [100.0, 1000.0, 10.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_up_lsm() {
let scale = [1000.0, 10.0, 100.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_down_sml() {
let scale = [0.01, 0.001, 0.0001];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_down_mls() {
let scale = [0.001, 0.0001, 0.01];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_down_lsm() {
let scale = [0.0001, 0.01, 0.01];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_unit_ls() {
let scale = [1.0, 100000.0, 0.000001];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_ms_negative_unit() {
let scale = [10.0, 0.1, -1.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_ms_negative_up() {
let scale = [10.0, 0.1, -10.0];
test_decompose_scale(scale);
}
#[test]
fn decompose_nonuniform_scale_ms_negative_down() {
let scale = [10.0, 0.1, -0.1];
test_decompose_scale(scale);
}
}

vendor/gltf/src/skin/iter.rs vendored Normal file

@@ -0,0 +1,40 @@
use std::slice;
use crate::{Document, Node};
/// An `Iterator` that visits the joints of a `Skin`.
#[derive(Clone, Debug)]
pub struct Joints<'a> {
/// The parent `Document` struct.
pub(crate) document: &'a Document,
/// The internal node index iterator.
pub(crate) iter: slice::Iter<'a, json::Index<json::scene::Node>>,
}
impl<'a> ExactSizeIterator for Joints<'a> {}
impl<'a> Iterator for Joints<'a> {
type Item = Node<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|index| self.document.nodes().nth(index.value()).unwrap())
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn count(self) -> usize {
self.iter.count()
}
fn last(self) -> Option<Self::Item> {
let document = self.document;
self.iter
.last()
.map(|index| document.nodes().nth(index.value()).unwrap())
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter
.nth(n)
.map(|index| self.document.nodes().nth(index.value()).unwrap())
}
}

vendor/gltf/src/skin/mod.rs vendored Normal file

@@ -0,0 +1,118 @@
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
use crate::{Accessor, Document, Node};
#[cfg(feature = "utils")]
use crate::Buffer;
/// Iterators.
pub mod iter;
/// Utility functions.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub mod util;
#[cfg(feature = "utils")]
#[doc(inline)]
pub use self::util::Reader;
/// Joints and matrices defining a skin.
#[derive(Clone, Debug)]
pub struct Skin<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::skin::Skin,
}
impl<'a> Skin<'a> {
/// Constructs a `Skin`.
pub(crate) fn new(document: &'a Document, index: usize, json: &'a json::skin::Skin) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &'a json::Extras {
&self.json.extras
}
/// Returns the accessor containing the 4x4 inverse-bind matrices.
///
/// When `None`, each matrix is assumed to be the 4x4 identity matrix, which
/// implies that the inverse-bind matrices were pre-applied.
pub fn inverse_bind_matrices(&self) -> Option<Accessor<'a>> {
self.json
.inverse_bind_matrices
.as_ref()
.map(|index| self.document.accessors().nth(index.value()).unwrap())
}
/// Constructs a skin reader.
#[cfg(feature = "utils")]
#[cfg_attr(docsrs, doc(cfg(feature = "utils")))]
pub fn reader<'s, F>(&'a self, get_buffer_data: F) -> Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
Reader {
skin: self.clone(),
get_buffer_data,
}
}
/// Returns an `Iterator` that visits the skeleton nodes used as joints in
/// this skin.
pub fn joints(&self) -> iter::Joints<'a> {
iter::Joints {
document: self.document,
iter: self.json.joints.iter(),
}
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
#[cfg_attr(docsrs, doc(cfg(feature = "names")))]
pub fn name(&self) -> Option<&'a str> {
self.json.name.as_deref()
}
/// Returns the node used as the skeleton root. When `None`, joint
/// transforms resolve to the scene root.
pub fn skeleton(&self) -> Option<Node<'a>> {
self.json
.skeleton
.as_ref()
.map(|index| self.document.nodes().nth(index.value()).unwrap())
}
}
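// Usage sketch (not part of the crate, needs the `utils` feature): pairing each
// joint with its inverse-bind matrix. `print_joint_matrices` is a hypothetical
// helper; `buffers` is assumed to be the buffer data returned by `gltf::import`,
// and the identity fallback mirrors the documented behaviour when no accessor
// is present.
fn print_joint_matrices(skin: &gltf::Skin<'_>, buffers: &[gltf::buffer::Data]) {
    let reader = skin.reader(|buffer| buffers.get(buffer.index()).map(|data| &data.0[..]));
    let identity = [
        [1.0, 0.0, 0.0, 0.0],
        [0.0, 1.0, 0.0, 0.0],
        [0.0, 0.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ];
    let mut ibms = reader.read_inverse_bind_matrices();
    for joint in skin.joints() {
        // When the accessor is absent, fall back to the identity matrix.
        let ibm = ibms.as_mut().and_then(|iter| iter.next()).unwrap_or(identity);
        println!("joint node #{}: {:?}", joint.index(), ibm);
    }
}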

vendor/gltf/src/skin/util.rs vendored Normal file

@@ -0,0 +1,29 @@
use crate::accessor;
use crate::{Buffer, Skin};
/// Inverse Bind Matrices of type `[[f32; 4]; 4]`.
pub type ReadInverseBindMatrices<'a> = accessor::Iter<'a, [[f32; 4]; 4]>;
/// Skin reader.
#[derive(Clone, Debug)]
pub struct Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
pub(crate) skin: Skin<'a>,
pub(crate) get_buffer_data: F,
}
impl<'a, 's, F> Reader<'a, 's, F>
where
F: Clone + Fn(Buffer<'a>) -> Option<&'s [u8]>,
{
/// Returns an `Iterator` that reads the inverse bind matrices of
/// the skin.
pub fn read_inverse_bind_matrices(&self) -> Option<ReadInverseBindMatrices<'s>> {
self.skin
.inverse_bind_matrices()
.and_then(|accessor| accessor::Iter::new(accessor, self.get_buffer_data.clone()))
}
}

vendor/gltf/src/texture.rs vendored Normal file

@@ -0,0 +1,304 @@
use crate::{image, Document};
pub use json::texture::{MagFilter, MinFilter, WrappingMode};
#[cfg(feature = "extensions")]
use serde_json::{Map, Value};
lazy_static! {
static ref DEFAULT_SAMPLER: json::texture::Sampler = Default::default();
}
/// A reference to a `Texture`.
#[derive(Clone, Debug)]
pub struct Info<'a> {
/// The parent `Texture` struct.
texture: Texture<'a>,
/// The corresponding JSON struct.
json: &'a json::texture::Info,
}
/// Texture sampler properties for filtering and wrapping modes.
#[derive(Clone, Debug)]
pub struct Sampler<'a> {
/// The parent `Document` struct.
#[allow(dead_code)]
document: &'a Document,
/// The corresponding JSON index, or `None` for the default sampler.
index: Option<usize>,
/// The corresponding JSON struct.
json: &'a json::texture::Sampler,
}
/// A texture and its sampler.
#[derive(Clone, Debug)]
pub struct Texture<'a> {
/// The parent `Document` struct.
document: &'a Document,
/// The corresponding JSON index.
index: usize,
/// The corresponding JSON struct.
json: &'a json::texture::Texture,
}
impl<'a> Sampler<'a> {
/// Constructs a `Sampler`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::texture::Sampler,
) -> Self {
Self {
document,
index: Some(index),
json,
}
}
/// Constructs the default `Sampler`.
pub(crate) fn default(document: &'a Document) -> Self {
Self {
document,
index: None,
json: &DEFAULT_SAMPLER,
}
}
/// Returns the internal JSON index if this `Sampler` was explicitly defined.
///
/// This function returns `None` if the `Sampler` is the default sampler.
pub fn index(&self) -> Option<usize> {
self.index
}
/// Magnification filter.
pub fn mag_filter(&self) -> Option<MagFilter> {
self.json.mag_filter.map(|filter| filter.unwrap())
}
/// Minification filter.
pub fn min_filter(&self) -> Option<MinFilter> {
self.json.min_filter.map(|filter| filter.unwrap())
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
pub fn name(&self) -> Option<&str> {
self.json.name.as_deref()
}
/// `s` wrapping mode.
pub fn wrap_s(&self) -> WrappingMode {
self.json.wrap_s.unwrap()
}
/// `t` wrapping mode.
pub fn wrap_t(&self) -> WrappingMode {
self.json.wrap_t.unwrap()
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &json::Extras {
&self.json.extras
}
}
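// Usage sketch (not part of the crate): translating the optional magnification
// filter and the `s` wrapping mode into strings, e.g. for logging or for mapping
// onto a renderer's own sampler state. `describe_sampler` is a hypothetical helper.
fn describe_sampler(sampler: &gltf::texture::Sampler<'_>) -> String {
    use gltf::texture::{MagFilter, WrappingMode};
    let mag = match sampler.mag_filter() {
        Some(MagFilter::Nearest) => "nearest",
        Some(MagFilter::Linear) => "linear",
        None => "unspecified",
    };
    let wrap_s = match sampler.wrap_s() {
        WrappingMode::ClampToEdge => "clamp-to-edge",
        WrappingMode::MirroredRepeat => "mirrored-repeat",
        WrappingMode::Repeat => "repeat",
    };
    format!("mag filter: {mag}, wrap s: {wrap_s}")
}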
impl<'a> Texture<'a> {
/// Constructs a `Texture`.
pub(crate) fn new(
document: &'a Document,
index: usize,
json: &'a json::texture::Texture,
) -> Self {
Self {
document,
index,
json,
}
}
/// Returns the internal JSON index.
pub fn index(&self) -> usize {
self.index
}
/// Optional user-defined name for this object.
#[cfg(feature = "names")]
pub fn name(&self) -> Option<&str> {
self.json.name.as_deref()
}
/// Returns the sampler used by this texture.
pub fn sampler(&self) -> Sampler<'a> {
self.json
.sampler
.as_ref()
.map(|index| self.document.samplers().nth(index.value()).unwrap())
.unwrap_or_else(|| Sampler::default(self.document))
}
/// Returns the image used by this texture, or `None` when no source image is set.
#[cfg(feature = "allow_empty_texture")]
pub fn source(&self) -> Option<image::Image<'a>> {
let index = self.json.source.value();
if index == u32::MAX as usize {
None
} else {
Some(self.document.images().nth(index).unwrap())
}
}
/// Returns the image used by this texture.
#[cfg(not(feature = "allow_empty_texture"))]
pub fn source(&self) -> image::Image<'a> {
self.document
.images()
.nth(self.json.source.value())
.unwrap()
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &json::Extras {
&self.json.extras
}
}
impl<'a> Info<'a> {
/// Constructs a reference to a `Texture`.
pub(crate) fn new(texture: Texture<'a>, json: &'a json::texture::Info) -> Self {
Self { texture, json }
}
/// The set index of the texture's `TEXCOORD` attribute.
pub fn tex_coord(&self) -> u32 {
self.json.tex_coord
}
/// Returns the referenced `Texture`.
pub fn texture(&self) -> Texture<'a> {
self.texture.clone()
}
/// Returns texture transform information, if present.
#[cfg(feature = "KHR_texture_transform")]
#[cfg_attr(docsrs, doc(cfg(feature = "KHR_texture_transform")))]
pub fn texture_transform(&self) -> Option<TextureTransform<'a>> {
self.json
.extensions
.as_ref()?
.texture_transform
.as_ref()
.map(TextureTransform::new)
}
/// Returns extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extensions(&self) -> Option<&Map<String, Value>> {
let ext = self.json.extensions.as_ref()?;
Some(&ext.others)
}
/// Queries extension data unknown to this crate version.
#[cfg(feature = "extensions")]
#[cfg_attr(docsrs, doc(cfg(feature = "extensions")))]
pub fn extension_value(&self, ext_name: &str) -> Option<&Value> {
let ext = self.json.extensions.as_ref()?;
ext.others.get(ext_name)
}
/// Optional application specific data.
pub fn extras(&self) -> &json::Extras {
&self.json.extras
}
}
impl<'a> AsRef<Texture<'a>> for Info<'a> {
fn as_ref(&self) -> &Texture<'a> {
&self.texture
}
}
/// Many techniques can be used to optimize resource usage for a 3d scene.
/// Chief among them is the ability to minimize the number of textures the GPU must load.
/// To achieve this, many engines encourage packing many objects' low-resolution textures into a single large texture atlas.
/// The region of the resulting atlas that corresponds with each object is then defined by vertical and horizontal offsets,
/// and the width and height of the region.
///
/// To support this use case, this extension adds `offset`, `rotation`, and `scale` properties to textureInfo structures.
/// These properties would typically be implemented as an affine transform on the UV coordinates.
#[cfg(feature = "KHR_texture_transform")]
pub struct TextureTransform<'a> {
/// The corresponding JSON struct.
json: &'a json::extensions::texture::TextureTransform,
}
#[cfg(feature = "KHR_texture_transform")]
impl<'a> TextureTransform<'a> {
/// Constructs a `TextureTransform`.
pub(crate) fn new(json: &'a json::extensions::texture::TextureTransform) -> Self {
Self { json }
}
/// The offset of the UV coordinate origin as a factor of the texture dimensions.
pub fn offset(&self) -> [f32; 2] {
self.json.offset.0
}
/// Rotate the UVs by this many radians counter-clockwise around the origin.
/// This is equivalent to a similar rotation of the image clockwise.
pub fn rotation(&self) -> f32 {
self.json.rotation.0
}
/// The scale factor applied to the components of the UV coordinates.
pub fn scale(&self) -> [f32; 2] {
self.json.scale.0
}
/// Overrides the textureInfo texCoord value if supplied, and if this extension is supported.
pub fn tex_coord(&self) -> Option<u32> {
self.json.tex_coord
}
/// Optional application specific data.
pub fn extras(&self) -> &json::Extras {
&self.json.extras
}
}
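// Usage sketch (not part of the crate, requires the `KHR_texture_transform`
// feature): applying a `TextureTransform` to a UV pair on the CPU. The order of
// operations (scale, then rotation, then offset) follows the extension's
// reference transform; verify the rotation sign convention against the
// KHR_texture_transform specification for your UV layout. `transform_uv` is a
// hypothetical helper.
fn transform_uv(t: &gltf::texture::TextureTransform<'_>, uv: [f32; 2]) -> [f32; 2] {
    let [ox, oy] = t.offset();
    let [sx, sy] = t.scale();
    let (sin, cos) = t.rotation().sin_cos();
    // Scale first, then rotate around the UV origin, then translate.
    let (u, v) = (uv[0] * sx, uv[1] * sy);
    [cos * u - sin * v + ox, sin * u + cos * v + oy]
}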


@@ -0,0 +1,95 @@
use std::error::Error as StdError;
use std::{fs, path};
const SAMPLE_MODELS_DIRECTORY_PATH: &str = "glTF-Sample-Assets/Models";
fn check_import_result(
result: gltf::Result<(
gltf::Document,
Vec<gltf::buffer::Data>,
Vec<gltf::image::Data>,
)>,
) {
use gltf::json::validation::Error;
match result {
Err(gltf::Error::Validation(errors)) => {
assert!(errors
.iter()
.all(|(_path, error)| *error == Error::Unsupported));
println!("skipped");
}
Err(otherwise) => {
panic!("{otherwise:#?}");
}
Ok((document, buffer_data, image_data)) => {
// Check buffers.
assert_eq!(document.buffers().len(), buffer_data.len());
for (buf, data) in document.buffers().zip(buffer_data.iter()) {
assert!(((buf.length() + 3) & !3) <= data.0.len());
}
// Check images.
assert_eq!(document.images().len(), image_data.len());
println!("ok");
}
}
}
fn run() -> Result<(), Box<dyn StdError>> {
let sample_dir_path = path::Path::new(SAMPLE_MODELS_DIRECTORY_PATH);
for entry in fs::read_dir(sample_dir_path)? {
let entry = entry?;
let metadata = entry.metadata()?;
if metadata.is_dir() {
let entry_path = entry.path();
if let Some(file_name) = entry_path.file_name() {
// Import standard glTF.
let mut gltf_path = entry_path.join("glTF").join(file_name);
gltf_path.set_extension("gltf");
if gltf_path.exists() {
print!("{}: ", gltf_path.display());
check_import_result(gltf::import(&gltf_path));
}
// Import standard glTF with embedded buffer and image data.
let mut gle_path = entry_path.join("glTF-Embedded").join(file_name);
gle_path.set_extension("gltf");
if gle_path.exists() {
print!("{}: ", gle_path.display());
check_import_result(gltf::import(&gle_path));
}
// Import binary glTF.
let mut glb_path = entry_path.join("glTF-Binary").join(file_name);
glb_path.set_extension("glb");
if glb_path.exists() {
print!("{}: ", glb_path.display());
check_import_result(gltf::import(&glb_path));
}
}
}
}
sparse_accessor_without_buffer_view_test()
}
/// Test a file with a sparse accessor with no buffer view.
fn sparse_accessor_without_buffer_view_test() -> Result<(), Box<dyn StdError>> {
let glb_path = path::Path::new("tests/box_sparse.glb");
print!("{}: ", glb_path.display());
check_import_result(gltf::import(glb_path));
let gltf_path = path::Path::new("tests/box_sparse.gltf");
print!("{}: ", gltf_path.display());
check_import_result(gltf::import(gltf_path));
Ok(())
}
#[test]
fn import_sample_models() {
if let Err(error) = run() {
panic!("import failed: {:?}", error);
}
}


@@ -0,0 +1,89 @@
//! Roundtrip test.
//!
//! Read some binary glTF, re-serialize it in memory, and compare to the original.
//! The test succeeds if the output is byte-for-byte identical to the original.
use std::io::Read;
use std::{boxed, error, fs, io, path};
const SAMPLE_MODELS_DIRECTORY_PATH: &str = "glTF-Sample-Assets/Models";
fn run() -> Result<(), boxed::Box<dyn error::Error>> {
let mut all_tests_passed = true;
let mut nr_test_cases = 0;
for entry in fs::read_dir(SAMPLE_MODELS_DIRECTORY_PATH)? {
let entry = entry?;
let metadata = entry.metadata()?;
if metadata.is_dir() {
let entry_path = entry.path();
if let Some(file_name) = entry_path.file_name() {
let mut path = entry_path.join("glTF-Binary").join(file_name);
path.set_extension("glb");
if path.exists() {
// not all models have binary versions
if let Err(err) = test(&path) {
println!("{:?}: error: {:?}", path, err);
all_tests_passed = false;
} else {
println!("{:?}: ok", path);
nr_test_cases += 1;
}
}
}
}
}
if sparse_accessor_without_buffer_view_test() {
nr_test_cases += 1;
} else {
all_tests_passed = false;
}
assert!(all_tests_passed);
assert!(nr_test_cases >= 25);
Ok(())
}
fn test(path: &path::Path) -> Result<(), boxed::Box<dyn error::Error>> {
let file = fs::File::open(path)?;
let length = file.metadata()?.len() as usize;
let mut reader = io::BufReader::new(file);
let mut original = Vec::with_capacity(length);
reader.read_to_end(&mut original)?;
// Check from_reader/to_vec implementation.
{
let glb = gltf::binary::Glb::from_reader(io::Cursor::new(&original))?;
let output = glb.to_vec()?;
assert_eq!(&original, &output);
}
// Check from_slice/to_writer implementation.
{
let glb = gltf::binary::Glb::from_slice(&original)?;
let mut output = Vec::with_capacity(length);
glb.to_writer(&mut output as &mut dyn io::Write)?;
assert_eq!(&original, &output);
}
Ok(())
}
/// Test a file with a sparse accessor with no buffer view.
///
/// Return true if the test passes, and false otherwise.
fn sparse_accessor_without_buffer_view_test() -> bool {
let path = path::Path::new("tests/box_sparse.glb");
if let Err(err) = test(path) {
println!("{:?}: error: {:?}", path, err);
false
} else {
println!("{:?}: ok", path);
true
}
}
#[test]
fn roundtrip_binary_gltf() {
run().expect("test failure");
}

vendor/gltf/tests/test_wrapper.rs vendored Normal file

@@ -0,0 +1,128 @@
use std::io::Read;
use std::{fs, io};
use gltf::mesh::Bounds;
#[test]
fn test_accessor_bounds() {
// file derived from minimal.gltf with changed min/max values
let file = fs::File::open("tests/minimal_accessor_min_max.gltf").unwrap();
let mut reader = io::BufReader::new(file);
let mut buffer = vec![];
reader.read_to_end(&mut buffer).unwrap();
let gltf = gltf::Gltf::from_slice(&buffer).unwrap();
let mesh = &gltf.meshes().next().unwrap();
let prim = mesh.primitives().next().unwrap();
let bounds = prim.bounding_box();
assert_eq!(
bounds,
Bounds {
min: [-0.03, -0.04, -0.05],
max: [1.0, 1.01, 0.02]
}
);
}
/// "SimpleSparseAccessor.gltf" contains positions specified with a sparse accessor.
/// The accessor use a base `bufferView` that contains 14 `Vec3`s and the sparse
/// section overwrites 3 of these with other values when read.
const SIMPLE_SPARSE_ACCESSOR_GLTF: &str =
"glTF-Sample-Assets/Models/SimpleSparseAccessor/glTF-Embedded/SimpleSparseAccessor.gltf";
#[test]
fn test_sparse_accessor_with_base_buffer_view_yield_exact_size_hints() {
let (document, buffers, _) = gltf::import(SIMPLE_SPARSE_ACCESSOR_GLTF).unwrap();
let mesh = document.meshes().next().unwrap();
let primitive = mesh.primitives().next().unwrap();
let reader = primitive
.reader(|buffer: gltf::Buffer| buffers.get(buffer.index()).map(|data| &data.0[..]));
let mut positions = reader.read_positions().unwrap();
const EXPECTED_POSITION_COUNT: usize = 14;
for i in (0..=EXPECTED_POSITION_COUNT).rev() {
assert_eq!(positions.size_hint(), (i, Some(i)));
positions.next();
}
}
#[test]
fn test_sparse_accessor_with_base_buffer_view_yield_all_values() {
let (document, buffers, _) = gltf::import(SIMPLE_SPARSE_ACCESSOR_GLTF).unwrap();
let mesh = document.meshes().next().unwrap();
let primitive = mesh.primitives().next().unwrap();
let reader = primitive
.reader(|buffer: gltf::Buffer| buffers.get(buffer.index()).map(|data| &data.0[..]));
let positions: Vec<[f32; 3]> = reader.read_positions().unwrap().collect::<Vec<_>>();
const EXPECTED_POSITIONS: [[f32; 3]; 14] = [
[0.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
[2.0, 0.0, 0.0],
[3.0, 0.0, 0.0],
[4.0, 0.0, 0.0],
[5.0, 0.0, 0.0],
[6.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[1.0, 2.0, 0.0], // Sparse value #1
[2.0, 1.0, 0.0],
[3.0, 3.0, 0.0], // Sparse value #2
[4.0, 1.0, 0.0],
[5.0, 4.0, 0.0], // Sparse value #3
[6.0, 1.0, 0.0],
];
assert_eq!(positions.len(), EXPECTED_POSITIONS.len());
for (i, p) in positions.iter().enumerate() {
for (j, q) in p.iter().enumerate() {
assert_eq!(q - EXPECTED_POSITIONS[i][j], 0.0);
}
}
}
/// "box_sparse.gltf" contains an animation with a sampler with output of two values.
/// The values are specified with a sparse accessor that is missing a base `bufferView` field.
/// Which means that each value in it will be 0.0, except the values contained in the sparse
/// buffer view itself. In this case the second value is read from the sparse accessor (1.0),
/// while the first is left at the default zero.
const BOX_SPARSE_GLTF: &str = "tests/box_sparse.gltf";
#[test]
fn test_sparse_accessor_without_base_buffer_view_yield_exact_size_hints() {
let (document, buffers, _) = gltf::import(BOX_SPARSE_GLTF).unwrap();
let animation = document.animations().next().unwrap();
let sampler = animation.samplers().next().unwrap();
let output_accessor = sampler.output();
let mut outputs_iter =
gltf::accessor::Iter::<f32>::new(output_accessor, |buffer: gltf::Buffer| {
buffers.get(buffer.index()).map(|data| &data.0[..])
})
.unwrap();
const EXPECTED_OUTPUT_COUNT: usize = 2;
for i in (0..=EXPECTED_OUTPUT_COUNT).rev() {
assert_eq!(outputs_iter.size_hint(), (i, Some(i)));
outputs_iter.next();
}
}
#[test]
fn test_sparse_accessor_without_base_buffer_view_yield_all_values() {
let (document, buffers, _) = gltf::import(BOX_SPARSE_GLTF).unwrap();
let animation = document.animations().next().unwrap();
let sampler = animation.samplers().next().unwrap();
let output_accessor = sampler.output();
let output_iter = gltf::accessor::Iter::<f32>::new(output_accessor, |buffer: gltf::Buffer| {
buffers.get(buffer.index()).map(|data| &data.0[..])
})
.unwrap();
let outputs = output_iter.collect::<Vec<_>>();
const EXPECTED_OUTPUTS: [f32; 2] = [0.0, 1.0];
assert_eq!(outputs.len(), EXPECTED_OUTPUTS.len());
for (i, o) in outputs.iter().enumerate() {
assert_eq!(o - EXPECTED_OUTPUTS[i], 0.0);
}
}