Vendor dependencies for 0.3.0 release

commit 82ab7f317b
parent 0c8d39d483
2025-09-27 10:29:08 -05:00
26803 changed files with 16134934 additions and 0 deletions
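
A vendor commit of this shape is typically produced by `cargo vendor`, which also prints the source-replacement stanza to place in `.cargo/config.toml`. The following is a hedged sketch of that configuration, assuming the default `vendor/` output directory used by the paths below; the repository's actual config file is not part of this diff.

[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"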

vendor/bevy_ecs_macros/.cargo-checksum.json vendored Normal file

@@ -0,0 +1 @@
{"files":{"Cargo.lock":"8ab92b18bcf21484778e312a3d98c6006fe7523118282ef7682ca36d38ea72f2","Cargo.toml":"5046d1e324725e54c94628e236481f82aeee76371ae4d047d815b6a012317a44","LICENSE-APACHE":"a6cba85bc92e0cff7a450b1d873c0eaa2e9fc96bf472df0247a26bec77bf3ff9","LICENSE-MIT":"508a77d2e7b51d98adeed32648ad124b7b30241a8e70b2e72c99f92d8e5874d1","src/component.rs":"30723a1b2ac1ae644201f56f13b08e02b125ee8e3187efc1cadb9629aab8174f","src/lib.rs":"1c82b8612e4341b74d3609ee9953af61274f192bd8fd9a46694cf7933dffdc99","src/query_data.rs":"17dd2fe88867cffcf73077c99467fc4e977edc93784902b19e2c3c0850a2335b","src/query_filter.rs":"92da36386e7c5dc0c247696dbd14c936916aa446a5ec832e711092c7d46ec721","src/states.rs":"16513345c4fea003594459b00f12e1861817c9931c81d665f72a4a542df51dbb","src/world_query.rs":"cd548f57c453ff1d23410927f00bef17d1f5f6abd81cd9811b0551c8269e8061"},"package":"38748d6f3339175c582d751f410fb60a93baf2286c3deb7efebb0878dce7f413"}

vendor/bevy_ecs_macros/Cargo.lock generated vendored Normal file (257 lines)

@@ -0,0 +1,257 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "autocfg"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "bevy_ecs_macros"
version = "0.16.1"
dependencies = [
"bevy_macro_utils",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "bevy_macro_utils"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "052eeebcb8e7e072beea5031b227d9a290f8a7fbbb947573ab6ec81df0fb94be"
dependencies = [
"parking_lot",
"proc-macro2",
"quote",
"syn",
"toml_edit",
]
[[package]]
name = "bitflags"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "equivalent"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "hashbrown"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
[[package]]
name = "indexmap"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
"equivalent",
"hashbrown",
]
[[package]]
name = "libc"
version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "lock_api"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "parking_lot"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-targets",
]
[[package]]
name = "proc-macro2"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
name = "redox_syscall"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af"
dependencies = [
"bitflags",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "smallvec"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
[[package]]
name = "syn"
version = "2.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "toml_datetime"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3"
[[package]]
name = "toml_edit"
version = "0.22.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
dependencies = [
"indexmap",
"toml_datetime",
"winnow",
]
[[package]]
name = "unicode-ident"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec"
dependencies = [
"memchr",
]

vendor/bevy_ecs_macros/Cargo.toml vendored Normal file (89 lines)

@@ -0,0 +1,89 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2024"
name = "bevy_ecs_macros"
version = "0.16.1"
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Bevy ECS Macros"
readme = false
license = "MIT OR Apache-2.0"
resolver = "2"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = [
"-Zunstable-options",
"--generate-link-to-definition",
]
[lib]
name = "bevy_ecs_macros"
path = "src/lib.rs"
proc-macro = true
[dependencies.bevy_macro_utils]
version = "0.16.1"
[dependencies.proc-macro2]
version = "1.0"
[dependencies.quote]
version = "1.0"
[dependencies.syn]
version = "2.0.99"
features = [
"full",
"extra-traits",
]
[lints.clippy]
alloc_instead_of_core = "warn"
allow_attributes = "warn"
allow_attributes_without_reason = "warn"
doc_markdown = "warn"
manual_let_else = "warn"
match_same_arms = "warn"
needless_lifetimes = "allow"
nonstandard_macro_braces = "warn"
print_stderr = "warn"
print_stdout = "warn"
ptr_as_ptr = "warn"
ptr_cast_constness = "warn"
redundant_closure_for_method_calls = "warn"
redundant_else = "warn"
ref_as_ptr = "warn"
semicolon_if_nothing_returned = "warn"
std_instead_of_alloc = "warn"
std_instead_of_core = "warn"
too_long_first_doc_paragraph = "allow"
too_many_arguments = "allow"
type_complexity = "allow"
undocumented_unsafe_blocks = "warn"
unwrap_or_default = "warn"
[lints.rust]
missing_docs = "warn"
unsafe_code = "deny"
unsafe_op_in_unsafe_fn = "warn"
unused_qualifications = "warn"
[lints.rust.unexpected_cfgs]
level = "warn"
priority = 0
check-cfg = ["cfg(docsrs_dep)"]

vendor/bevy_ecs_macros/LICENSE-APACHE vendored Normal file (176 lines)

@@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

vendor/bevy_ecs_macros/LICENSE-MIT vendored Normal file (19 lines)

@@ -0,0 +1,19 @@
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

vendor/bevy_ecs_macros/src/component.rs vendored Normal file (853 lines)

@@ -0,0 +1,853 @@
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{format_ident, quote, ToTokens};
use std::collections::HashSet;
use syn::{
braced, parenthesized,
parse::Parse,
parse_macro_input, parse_quote,
punctuated::Punctuated,
spanned::Spanned,
token::{Brace, Comma, Paren},
Data, DataEnum, DataStruct, DeriveInput, Expr, ExprCall, ExprPath, Field, Fields, Ident,
LitStr, Member, Path, Result, Token, Type, Visibility,
};
pub const EVENT: &str = "event";
pub const AUTO_PROPAGATE: &str = "auto_propagate";
pub const TRAVERSAL: &str = "traversal";
pub fn derive_event(input: TokenStream) -> TokenStream {
let mut ast = parse_macro_input!(input as DeriveInput);
let mut auto_propagate = false;
let mut traversal: Type = parse_quote!(());
let bevy_ecs_path: Path = crate::bevy_ecs_path();
ast.generics
.make_where_clause()
.predicates
.push(parse_quote! { Self: Send + Sync + 'static });
if let Some(attr) = ast.attrs.iter().find(|attr| attr.path().is_ident(EVENT)) {
if let Err(e) = attr.parse_nested_meta(|meta| match meta.path.get_ident() {
Some(ident) if ident == AUTO_PROPAGATE => {
auto_propagate = true;
Ok(())
}
Some(ident) if ident == TRAVERSAL => {
traversal = meta.value()?.parse()?;
Ok(())
}
Some(ident) => Err(meta.error(format!("unsupported attribute: {}", ident))),
None => Err(meta.error("expected identifier")),
}) {
return e.to_compile_error().into();
}
}
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
TokenStream::from(quote! {
impl #impl_generics #bevy_ecs_path::event::Event for #struct_name #type_generics #where_clause {
type Traversal = #traversal;
const AUTO_PROPAGATE: bool = #auto_propagate;
}
})
}
pub fn derive_resource(input: TokenStream) -> TokenStream {
let mut ast = parse_macro_input!(input as DeriveInput);
let bevy_ecs_path: Path = crate::bevy_ecs_path();
ast.generics
.make_where_clause()
.predicates
.push(parse_quote! { Self: Send + Sync + 'static });
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
TokenStream::from(quote! {
impl #impl_generics #bevy_ecs_path::resource::Resource for #struct_name #type_generics #where_clause {
}
})
}
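
Editorial note, not part of the vendored file: a hedged sketch of how the two derives above are typically invoked from a crate depending on bevy_ecs 0.16. The type names are hypothetical, and the traversal type assumes the built-in `ChildOf` hierarchy component.

use bevy_ecs::prelude::*;

// Expands via `derive_event` to an `Event` impl with `AUTO_PROPAGATE = true`
// and `Traversal = &'static ChildOf` (the defaults are `false` and `()`).
#[derive(Event)]
#[event(auto_propagate, traversal = &'static ChildOf)]
struct Damaged {
    amount: u32,
}

// Expands via `derive_resource` to a plain `Resource` impl with a
// `Self: Send + Sync + 'static` bound added to the where-clause.
#[derive(Resource)]
struct Score(u32);
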
pub fn derive_component(input: TokenStream) -> TokenStream {
let mut ast = parse_macro_input!(input as DeriveInput);
let bevy_ecs_path: Path = crate::bevy_ecs_path();
let attrs = match parse_component_attr(&ast) {
Ok(attrs) => attrs,
Err(e) => return e.into_compile_error().into(),
};
let relationship = match derive_relationship(&ast, &attrs, &bevy_ecs_path) {
Ok(value) => value,
Err(err) => return err.into_compile_error().into(),
};
let relationship_target = match derive_relationship_target(&ast, &attrs, &bevy_ecs_path) {
Ok(value) => value,
Err(err) => return err.into_compile_error().into(),
};
let map_entities = map_entities(
&ast.data,
Ident::new("this", Span::call_site()),
relationship.is_some(),
relationship_target.is_some(),
).map(|map_entities_impl| quote! {
fn map_entities<M: #bevy_ecs_path::entity::EntityMapper>(this: &mut Self, mapper: &mut M) {
use #bevy_ecs_path::entity::MapEntities;
#map_entities_impl
}
});
let storage = storage_path(&bevy_ecs_path, attrs.storage);
let on_add_path = attrs
.on_add
.map(|path| path.to_token_stream(&bevy_ecs_path));
let on_remove_path = attrs
.on_remove
.map(|path| path.to_token_stream(&bevy_ecs_path));
let on_insert_path = if relationship.is_some() {
if attrs.on_insert.is_some() {
return syn::Error::new(
ast.span(),
"Custom on_insert hooks are not supported as relationships already define an on_insert hook",
)
.into_compile_error()
.into();
}
Some(quote!(<Self as #bevy_ecs_path::relationship::Relationship>::on_insert))
} else {
attrs
.on_insert
.map(|path| path.to_token_stream(&bevy_ecs_path))
};
let on_replace_path = if relationship.is_some() {
if attrs.on_replace.is_some() {
return syn::Error::new(
ast.span(),
"Custom on_replace hooks are not supported as Relationships already define an on_replace hook",
)
.into_compile_error()
.into();
}
Some(quote!(<Self as #bevy_ecs_path::relationship::Relationship>::on_replace))
} else if attrs.relationship_target.is_some() {
if attrs.on_replace.is_some() {
return syn::Error::new(
ast.span(),
"Custom on_replace hooks are not supported as RelationshipTarget already defines an on_replace hook",
)
.into_compile_error()
.into();
}
Some(quote!(<Self as #bevy_ecs_path::relationship::RelationshipTarget>::on_replace))
} else {
attrs
.on_replace
.map(|path| path.to_token_stream(&bevy_ecs_path))
};
let on_despawn_path = if attrs
.relationship_target
.is_some_and(|target| target.linked_spawn)
{
if attrs.on_despawn.is_some() {
return syn::Error::new(
ast.span(),
"Custom on_despawn hooks are not supported as this RelationshipTarget already defines an on_despawn hook, via the 'linked_spawn' attribute",
)
.into_compile_error()
.into();
}
Some(quote!(<Self as #bevy_ecs_path::relationship::RelationshipTarget>::on_despawn))
} else {
attrs
.on_despawn
.map(|path| path.to_token_stream(&bevy_ecs_path))
};
let on_add = hook_register_function_call(&bevy_ecs_path, quote! {on_add}, on_add_path);
let on_insert = hook_register_function_call(&bevy_ecs_path, quote! {on_insert}, on_insert_path);
let on_replace =
hook_register_function_call(&bevy_ecs_path, quote! {on_replace}, on_replace_path);
let on_remove = hook_register_function_call(&bevy_ecs_path, quote! {on_remove}, on_remove_path);
let on_despawn =
hook_register_function_call(&bevy_ecs_path, quote! {on_despawn}, on_despawn_path);
ast.generics
.make_where_clause()
.predicates
.push(parse_quote! { Self: Send + Sync + 'static });
let requires = &attrs.requires;
let mut register_required = Vec::with_capacity(attrs.requires.iter().len());
let mut register_recursive_requires = Vec::with_capacity(attrs.requires.iter().len());
if let Some(requires) = requires {
for require in requires {
let ident = &require.path;
register_recursive_requires.push(quote! {
<#ident as #bevy_ecs_path::component::Component>::register_required_components(
requiree,
components,
required_components,
inheritance_depth + 1,
recursion_check_stack
);
});
match &require.func {
Some(func) => {
register_required.push(quote! {
components.register_required_components_manual::<Self, #ident>(
required_components,
|| { let x: #ident = (#func)().into(); x },
inheritance_depth,
recursion_check_stack
);
});
}
None => {
register_required.push(quote! {
components.register_required_components_manual::<Self, #ident>(
required_components,
<#ident as Default>::default,
inheritance_depth,
recursion_check_stack
);
});
}
}
}
}
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
let required_component_docs = attrs.requires.map(|r| {
let paths = r
.iter()
.map(|r| format!("[`{}`]", r.path.to_token_stream()))
.collect::<Vec<_>>()
.join(", ");
let doc = format!("**Required Components**: {paths}. \n\n A component's Required Components are inserted whenever it is inserted. Note that this will also insert the required components _of_ the required components, recursively, in depth-first order.");
quote! {
#[doc = #doc]
}
});
let mutable_type = (attrs.immutable || relationship.is_some())
.then_some(quote! { #bevy_ecs_path::component::Immutable })
.unwrap_or(quote! { #bevy_ecs_path::component::Mutable });
let clone_behavior = if relationship_target.is_some() {
quote!(#bevy_ecs_path::component::ComponentCloneBehavior::Custom(#bevy_ecs_path::relationship::clone_relationship_target::<Self>))
} else {
quote!(
use #bevy_ecs_path::component::{DefaultCloneBehaviorBase, DefaultCloneBehaviorViaClone};
(&&&#bevy_ecs_path::component::DefaultCloneBehaviorSpecialization::<Self>::default()).default_clone_behavior()
)
};
// This puts `register_required` before `register_recursive_requires` to ensure that the constructors of _all_ top
// level components are initialized first, giving them precedence over recursively defined constructors for the same component type
TokenStream::from(quote! {
#required_component_docs
impl #impl_generics #bevy_ecs_path::component::Component for #struct_name #type_generics #where_clause {
const STORAGE_TYPE: #bevy_ecs_path::component::StorageType = #storage;
type Mutability = #mutable_type;
fn register_required_components(
requiree: #bevy_ecs_path::component::ComponentId,
components: &mut #bevy_ecs_path::component::ComponentsRegistrator,
required_components: &mut #bevy_ecs_path::component::RequiredComponents,
inheritance_depth: u16,
recursion_check_stack: &mut #bevy_ecs_path::__macro_exports::Vec<#bevy_ecs_path::component::ComponentId>
) {
#bevy_ecs_path::component::enforce_no_required_components_recursion(components, recursion_check_stack);
let self_id = components.register_component::<Self>();
recursion_check_stack.push(self_id);
#(#register_required)*
#(#register_recursive_requires)*
recursion_check_stack.pop();
}
#on_add
#on_insert
#on_replace
#on_remove
#on_despawn
fn clone_behavior() -> #bevy_ecs_path::component::ComponentCloneBehavior {
#clone_behavior
}
#map_entities
}
#relationship
#relationship_target
})
}
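
Editorial note, not part of the vendored file: a hedged sketch of the attributes handled by `derive_component` and `parse_component_attr` above, written against the bevy_ecs 0.16 public API; all item names are hypothetical.

use bevy_ecs::component::HookContext;
use bevy_ecs::prelude::*;
use bevy_ecs::world::DeferredWorld;

#[derive(Component, Default)]
struct Team(u8);

// Sparse-set storage, immutability, an `on_add` hook given as a path, and a
// required component constructed through `Default`.
#[derive(Component)]
#[component(storage = "SparseSet", immutable, on_add = log_marker_added)]
#[require(Team)]
struct Marker;

fn log_marker_added(_world: DeferredWorld, _ctx: HookContext) {
    // Hook body elided; the signature matches `ComponentHook`.
}
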
const ENTITIES: &str = "entities";
pub(crate) fn map_entities(
data: &Data,
self_ident: Ident,
is_relationship: bool,
is_relationship_target: bool,
) -> Option<TokenStream2> {
match data {
Data::Struct(DataStruct { fields, .. }) => {
let mut map = Vec::with_capacity(fields.len());
let relationship = if is_relationship || is_relationship_target {
relationship_field(fields, "MapEntities", fields.span()).ok()
} else {
None
};
fields
.iter()
.enumerate()
.filter(|(_, field)| {
field.attrs.iter().any(|a| a.path().is_ident(ENTITIES))
|| relationship.is_some_and(|relationship| relationship == *field)
})
.for_each(|(index, field)| {
let field_member = field
.ident
.clone()
.map_or(Member::from(index), Member::Named);
map.push(quote!(#self_ident.#field_member.map_entities(mapper);));
});
if map.is_empty() {
return None;
};
Some(quote!(
#(#map)*
))
}
Data::Enum(DataEnum { variants, .. }) => {
let mut map = Vec::with_capacity(variants.len());
for variant in variants.iter() {
let field_members = variant
.fields
.iter()
.enumerate()
.filter(|(_, field)| field.attrs.iter().any(|a| a.path().is_ident(ENTITIES)))
.map(|(index, field)| {
field
.ident
.clone()
.map_or(Member::from(index), Member::Named)
})
.collect::<Vec<_>>();
let ident = &variant.ident;
let field_idents = field_members
.iter()
.map(|member| format_ident!("__self_{}", member))
.collect::<Vec<_>>();
map.push(
quote!(Self::#ident {#(#field_members: #field_idents,)* ..} => {
#(#field_idents.map_entities(mapper);)*
}),
);
}
if map.is_empty() {
return None;
};
Some(quote!(
match #self_ident {
#(#map,)*
_ => {}
}
))
}
Data::Union(_) => None,
}
}
pub const COMPONENT: &str = "component";
pub const STORAGE: &str = "storage";
pub const REQUIRE: &str = "require";
pub const RELATIONSHIP: &str = "relationship";
pub const RELATIONSHIP_TARGET: &str = "relationship_target";
pub const ON_ADD: &str = "on_add";
pub const ON_INSERT: &str = "on_insert";
pub const ON_REPLACE: &str = "on_replace";
pub const ON_REMOVE: &str = "on_remove";
pub const ON_DESPAWN: &str = "on_despawn";
pub const IMMUTABLE: &str = "immutable";
/// All allowed attribute value expression kinds for component hooks
#[derive(Debug)]
enum HookAttributeKind {
/// expressions like function or struct names
///
/// structs will throw compile errors on the code generation so this is safe
Path(ExprPath),
/// function call like expressions
Call(ExprCall),
}
impl HookAttributeKind {
fn from_expr(value: Expr) -> Result<Self> {
match value {
Expr::Path(path) => Ok(HookAttributeKind::Path(path)),
Expr::Call(call) => Ok(HookAttributeKind::Call(call)),
// throw meaningful error on all other expressions
_ => Err(syn::Error::new(
value.span(),
[
"Not supported in this position, please use one of the following:",
"- path to function",
"- call to function yielding closure",
]
.join("\n"),
)),
}
}
fn to_token_stream(&self, bevy_ecs_path: &Path) -> TokenStream2 {
match self {
HookAttributeKind::Path(path) => path.to_token_stream(),
HookAttributeKind::Call(call) => {
quote!({
fn _internal_hook(world: #bevy_ecs_path::world::DeferredWorld, ctx: #bevy_ecs_path::component::HookContext) {
(#call)(world, ctx)
}
_internal_hook
})
}
}
}
}
impl Parse for HookAttributeKind {
fn parse(input: syn::parse::ParseStream) -> Result<Self> {
input.parse::<Expr>().and_then(Self::from_expr)
}
}
struct Attrs {
storage: StorageTy,
requires: Option<Punctuated<Require, Comma>>,
on_add: Option<HookAttributeKind>,
on_insert: Option<HookAttributeKind>,
on_replace: Option<HookAttributeKind>,
on_remove: Option<HookAttributeKind>,
on_despawn: Option<HookAttributeKind>,
relationship: Option<Relationship>,
relationship_target: Option<RelationshipTarget>,
immutable: bool,
}
#[derive(Clone, Copy)]
enum StorageTy {
Table,
SparseSet,
}
struct Require {
path: Path,
func: Option<TokenStream2>,
}
struct Relationship {
relationship_target: Type,
}
struct RelationshipTarget {
relationship: Type,
linked_spawn: bool,
}
// values for `storage` attribute
const TABLE: &str = "Table";
const SPARSE_SET: &str = "SparseSet";
fn parse_component_attr(ast: &DeriveInput) -> Result<Attrs> {
let mut attrs = Attrs {
storage: StorageTy::Table,
on_add: None,
on_insert: None,
on_replace: None,
on_remove: None,
on_despawn: None,
requires: None,
relationship: None,
relationship_target: None,
immutable: false,
};
let mut require_paths = HashSet::new();
for attr in ast.attrs.iter() {
if attr.path().is_ident(COMPONENT) {
attr.parse_nested_meta(|nested| {
if nested.path.is_ident(STORAGE) {
attrs.storage = match nested.value()?.parse::<LitStr>()?.value() {
s if s == TABLE => StorageTy::Table,
s if s == SPARSE_SET => StorageTy::SparseSet,
s => {
return Err(nested.error(format!(
"Invalid storage type `{s}`, expected '{TABLE}' or '{SPARSE_SET}'.",
)));
}
};
Ok(())
} else if nested.path.is_ident(ON_ADD) {
attrs.on_add = Some(nested.value()?.parse::<HookAttributeKind>()?);
Ok(())
} else if nested.path.is_ident(ON_INSERT) {
attrs.on_insert = Some(nested.value()?.parse::<HookAttributeKind>()?);
Ok(())
} else if nested.path.is_ident(ON_REPLACE) {
attrs.on_replace = Some(nested.value()?.parse::<HookAttributeKind>()?);
Ok(())
} else if nested.path.is_ident(ON_REMOVE) {
attrs.on_remove = Some(nested.value()?.parse::<HookAttributeKind>()?);
Ok(())
} else if nested.path.is_ident(ON_DESPAWN) {
attrs.on_despawn = Some(nested.value()?.parse::<HookAttributeKind>()?);
Ok(())
} else if nested.path.is_ident(IMMUTABLE) {
attrs.immutable = true;
Ok(())
} else {
Err(nested.error("Unsupported attribute"))
}
})?;
} else if attr.path().is_ident(REQUIRE) {
let punctuated =
attr.parse_args_with(Punctuated::<Require, Comma>::parse_terminated)?;
for require in punctuated.iter() {
if !require_paths.insert(require.path.to_token_stream().to_string()) {
return Err(syn::Error::new(
require.path.span(),
"Duplicate required components are not allowed.",
));
}
}
if let Some(current) = &mut attrs.requires {
current.extend(punctuated);
} else {
attrs.requires = Some(punctuated);
}
} else if attr.path().is_ident(RELATIONSHIP) {
let relationship = attr.parse_args::<Relationship>()?;
attrs.relationship = Some(relationship);
} else if attr.path().is_ident(RELATIONSHIP_TARGET) {
let relationship_target = attr.parse_args::<RelationshipTarget>()?;
attrs.relationship_target = Some(relationship_target);
}
}
Ok(attrs)
}
impl Parse for Require {
fn parse(input: syn::parse::ParseStream) -> Result<Self> {
let mut path = input.parse::<Path>()?;
let mut last_segment_is_lower = false;
let mut is_constructor_call = false;
// Use the case of the type name to check if it's an enum
// This doesn't match everything that can be an enum according to the rust spec
// but it matches what clippy is OK with
let is_enum = {
let mut first_chars = path
.segments
.iter()
.rev()
.filter_map(|s| s.ident.to_string().chars().next());
if let Some(last) = first_chars.next() {
if last.is_uppercase() {
if let Some(last) = first_chars.next() {
last.is_uppercase()
} else {
false
}
} else {
last_segment_is_lower = true;
false
}
} else {
false
}
};
let func = if input.peek(Token![=]) {
// If there is an '=', then this is a "function style" require
let _t: syn::Token![=] = input.parse()?;
let expr: Expr = input.parse()?;
let tokens: TokenStream = quote::quote! (|| #expr).into();
Some(TokenStream2::from(tokens))
} else if input.peek(Brace) {
// This is a "value style" named-struct-like require
let content;
braced!(content in input);
let content = content.parse::<TokenStream2>()?;
let tokens: TokenStream = quote::quote! (|| #path { #content }).into();
Some(TokenStream2::from(tokens))
} else if input.peek(Paren) {
// This is a "value style" tuple-struct-like require
let content;
parenthesized!(content in input);
let content = content.parse::<TokenStream2>()?;
is_constructor_call = last_segment_is_lower;
let tokens: TokenStream = quote::quote! (|| #path (#content)).into();
Some(TokenStream2::from(tokens))
} else if is_enum {
// if this is an enum, then it is an inline enum component declaration
let tokens: TokenStream = quote::quote! (|| #path).into();
Some(TokenStream2::from(tokens))
} else {
// if this isn't any of the above, then it is a component ident, which will use Default
None
};
if is_enum || is_constructor_call {
let path_len = path.segments.len();
path = Path {
leading_colon: path.leading_colon,
segments: Punctuated::from_iter(path.segments.into_iter().take(path_len - 1)),
};
}
Ok(Require { path, func })
}
}
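
Editorial note, not part of the vendored file: the parser above accepts several `require` spellings (bare path, function-style `Name = expr`, value-style `Name { fields }` and `Name(args)`, and inline enum variants). A hedged sketch with hypothetical component names:

use bevy_ecs::prelude::*;

#[derive(Component, Default)]
struct Health(u32);

#[derive(Component)]
struct Armor { value: u32 }

#[derive(Component)]
enum Stance { Standing, Crouching }

#[derive(Component)]
#[require(
    Health,             // bare path: constructed with `Default::default`
    Armor { value: 5 }, // "value style" named-struct require
    Stance::Crouching,  // inline enum variant (last path segment stripped)
)]
struct Player;
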
fn storage_path(bevy_ecs_path: &Path, ty: StorageTy) -> TokenStream2 {
let storage_type = match ty {
StorageTy::Table => Ident::new("Table", Span::call_site()),
StorageTy::SparseSet => Ident::new("SparseSet", Span::call_site()),
};
quote! { #bevy_ecs_path::component::StorageType::#storage_type }
}
fn hook_register_function_call(
bevy_ecs_path: &Path,
hook: TokenStream2,
function: Option<TokenStream2>,
) -> Option<TokenStream2> {
function.map(|meta| {
quote! {
fn #hook() -> ::core::option::Option<#bevy_ecs_path::component::ComponentHook> {
::core::option::Option::Some(#meta)
}
}
})
}
mod kw {
syn::custom_keyword!(relationship_target);
syn::custom_keyword!(relationship);
syn::custom_keyword!(linked_spawn);
}
impl Parse for Relationship {
fn parse(input: syn::parse::ParseStream) -> Result<Self> {
input.parse::<kw::relationship_target>()?;
input.parse::<Token![=]>()?;
Ok(Relationship {
relationship_target: input.parse::<Type>()?,
})
}
}
impl Parse for RelationshipTarget {
fn parse(input: syn::parse::ParseStream) -> Result<Self> {
let mut relationship: Option<Type> = None;
let mut linked_spawn: bool = false;
while !input.is_empty() {
let lookahead = input.lookahead1();
if lookahead.peek(kw::linked_spawn) {
input.parse::<kw::linked_spawn>()?;
linked_spawn = true;
} else if lookahead.peek(kw::relationship) {
input.parse::<kw::relationship>()?;
input.parse::<Token![=]>()?;
relationship = Some(input.parse()?);
} else {
return Err(lookahead.error());
}
if !input.is_empty() {
input.parse::<Token![,]>()?;
}
}
Ok(RelationshipTarget {
relationship: relationship.ok_or_else(|| {
syn::Error::new(input.span(), "Missing `relationship = X` attribute")
})?,
linked_spawn,
})
}
}
fn derive_relationship(
ast: &DeriveInput,
attrs: &Attrs,
bevy_ecs_path: &Path,
) -> Result<Option<TokenStream2>> {
let Some(relationship) = &attrs.relationship else {
return Ok(None);
};
let Data::Struct(DataStruct {
fields,
struct_token,
..
}) = &ast.data
else {
return Err(syn::Error::new(
ast.span(),
"Relationship can only be derived for structs.",
));
};
let field = relationship_field(fields, "Relationship", struct_token.span())?;
let relationship_member = field.ident.clone().map_or(Member::from(0), Member::Named);
let members = fields
.members()
.filter(|member| member != &relationship_member);
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
let relationship_target = &relationship.relationship_target;
Ok(Some(quote! {
impl #impl_generics #bevy_ecs_path::relationship::Relationship for #struct_name #type_generics #where_clause {
type RelationshipTarget = #relationship_target;
#[inline(always)]
fn get(&self) -> #bevy_ecs_path::entity::Entity {
self.#relationship_member
}
#[inline]
fn from(entity: #bevy_ecs_path::entity::Entity) -> Self {
Self {
#(#members: core::default::Default::default(),)*
#relationship_member: entity
}
}
}
}))
}
fn derive_relationship_target(
ast: &DeriveInput,
attrs: &Attrs,
bevy_ecs_path: &Path,
) -> Result<Option<TokenStream2>> {
let Some(relationship_target) = &attrs.relationship_target else {
return Ok(None);
};
let Data::Struct(DataStruct {
fields,
struct_token,
..
}) = &ast.data
else {
return Err(syn::Error::new(
ast.span(),
"RelationshipTarget can only be derived for structs.",
));
};
let field = relationship_field(fields, "RelationshipTarget", struct_token.span())?;
if field.vis != Visibility::Inherited {
return Err(syn::Error::new(field.span(), "The collection in RelationshipTarget must be private to prevent users from directly mutating it, which could invalidate the correctness of relationships."));
}
let collection = &field.ty;
let relationship_member = field.ident.clone().map_or(Member::from(0), Member::Named);
let members = fields
.members()
.filter(|member| member != &relationship_member);
let relationship = &relationship_target.relationship;
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
let linked_spawn = relationship_target.linked_spawn;
Ok(Some(quote! {
impl #impl_generics #bevy_ecs_path::relationship::RelationshipTarget for #struct_name #type_generics #where_clause {
const LINKED_SPAWN: bool = #linked_spawn;
type Relationship = #relationship;
type Collection = #collection;
#[inline]
fn collection(&self) -> &Self::Collection {
&self.#relationship_member
}
#[inline]
fn collection_mut_risky(&mut self) -> &mut Self::Collection {
&mut self.#relationship_member
}
#[inline]
fn from_collection_risky(collection: Self::Collection) -> Self {
Self {
#(#members: core::default::Default::default(),)*
#relationship_member: collection
}
}
}
}))
}
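
Editorial note, not part of the vendored file: a hedged sketch of a custom relationship pair in the style of bevy_ecs's own ChildOf/Children, assuming `Vec<Entity>` satisfies the collection bound (as it does for `Children` in 0.16); the names are hypothetical.

use bevy_ecs::prelude::*;

#[derive(Component)]
#[relationship(relationship_target = Members)]
struct MemberOf(Entity);

// The collection field stays private, as enforced by the visibility check above.
#[derive(Component)]
#[relationship_target(relationship = MemberOf, linked_spawn)]
struct Members(Vec<Entity>);
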
/// Returns the field with the `#[relationship]` attribute, the only field if unnamed,
/// or the only field in a [`Fields::Named`] with one field, otherwise `Err`.
fn relationship_field<'a>(
fields: &'a Fields,
derive: &'static str,
span: Span,
) -> Result<&'a Field> {
match fields {
Fields::Named(fields) if fields.named.len() == 1 => Ok(fields.named.first().unwrap()),
Fields::Named(fields) => fields.named.iter().find(|field| {
field
.attrs
.iter()
.any(|attr| attr.path().is_ident(RELATIONSHIP))
}).ok_or(syn::Error::new(
span,
format!("{derive} derive expected named structs with a single field or with a field annotated with #[relationship].")
)),
Fields::Unnamed(fields) => fields
.unnamed
.len()
.eq(&1)
.then(|| fields.unnamed.first())
.flatten()
.ok_or(syn::Error::new(
span,
format!("{derive} derive expected unnamed structs with one field."),
)),
Fields::Unit => Err(syn::Error::new(
span,
format!("{derive} derive expected named or unnamed struct, found unit struct."),
)),
}
}

vendor/bevy_ecs_macros/src/lib.rs vendored Normal file (617 lines)

@@ -0,0 +1,617 @@
#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
extern crate proc_macro;
mod component;
mod query_data;
mod query_filter;
mod states;
mod world_query;
use crate::{
component::map_entities, query_data::derive_query_data_impl,
query_filter::derive_query_filter_impl,
};
use bevy_macro_utils::{derive_label, ensure_no_collision, get_struct_fields, BevyManifest};
use proc_macro::TokenStream;
use proc_macro2::{Ident, Span};
use quote::{format_ident, quote};
use syn::{
parse_macro_input, parse_quote, punctuated::Punctuated, spanned::Spanned, token::Comma,
ConstParam, Data, DataStruct, DeriveInput, GenericParam, Index, TypeParam,
};
enum BundleFieldKind {
Component,
Ignore,
}
const BUNDLE_ATTRIBUTE_NAME: &str = "bundle";
const BUNDLE_ATTRIBUTE_IGNORE_NAME: &str = "ignore";
#[proc_macro_derive(Bundle, attributes(bundle))]
pub fn derive_bundle(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let ecs_path = bevy_ecs_path();
let named_fields = match get_struct_fields(&ast.data) {
Ok(fields) => fields,
Err(e) => return e.into_compile_error().into(),
};
let mut field_kind = Vec::with_capacity(named_fields.len());
for field in named_fields {
for attr in field
.attrs
.iter()
.filter(|a| a.path().is_ident(BUNDLE_ATTRIBUTE_NAME))
{
if let Err(error) = attr.parse_nested_meta(|meta| {
if meta.path.is_ident(BUNDLE_ATTRIBUTE_IGNORE_NAME) {
field_kind.push(BundleFieldKind::Ignore);
Ok(())
} else {
Err(meta.error(format!(
"Invalid bundle attribute. Use `{BUNDLE_ATTRIBUTE_IGNORE_NAME}`"
)))
}
}) {
return error.into_compile_error().into();
}
}
field_kind.push(BundleFieldKind::Component);
}
let field = named_fields
.iter()
.map(|field| field.ident.as_ref())
.collect::<Vec<_>>();
let field_type = named_fields
.iter()
.map(|field| &field.ty)
.collect::<Vec<_>>();
let mut field_component_ids = Vec::new();
let mut field_get_component_ids = Vec::new();
let mut field_get_components = Vec::new();
let mut field_from_components = Vec::new();
let mut field_required_components = Vec::new();
for (((i, field_type), field_kind), field) in field_type
.iter()
.enumerate()
.zip(field_kind.iter())
.zip(field.iter())
{
match field_kind {
BundleFieldKind::Component => {
field_component_ids.push(quote! {
<#field_type as #ecs_path::bundle::Bundle>::component_ids(components, &mut *ids);
});
field_required_components.push(quote! {
<#field_type as #ecs_path::bundle::Bundle>::register_required_components(components, required_components);
});
field_get_component_ids.push(quote! {
<#field_type as #ecs_path::bundle::Bundle>::get_component_ids(components, &mut *ids);
});
match field {
Some(field) => {
field_get_components.push(quote! {
self.#field.get_components(&mut *func);
});
field_from_components.push(quote! {
#field: <#field_type as #ecs_path::bundle::BundleFromComponents>::from_components(ctx, &mut *func),
});
}
None => {
let index = Index::from(i);
field_get_components.push(quote! {
self.#index.get_components(&mut *func);
});
field_from_components.push(quote! {
#index: <#field_type as #ecs_path::bundle::BundleFromComponents>::from_components(ctx, &mut *func),
});
}
}
}
BundleFieldKind::Ignore => {
field_from_components.push(quote! {
#field: ::core::default::Default::default(),
});
}
}
}
let generics = ast.generics;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let struct_name = &ast.ident;
TokenStream::from(quote! {
// SAFETY:
// - ComponentId is returned in field-definition-order. [get_components] uses field-definition-order
// - `Bundle::get_components` is called exactly once for each member. Relies on the Component -> Bundle implementation to properly pass
// the correct `StorageType` into the callback.
#[allow(deprecated)]
unsafe impl #impl_generics #ecs_path::bundle::Bundle for #struct_name #ty_generics #where_clause {
fn component_ids(
components: &mut #ecs_path::component::ComponentsRegistrator,
ids: &mut impl FnMut(#ecs_path::component::ComponentId)
){
#(#field_component_ids)*
}
fn get_component_ids(
components: &#ecs_path::component::Components,
ids: &mut impl FnMut(Option<#ecs_path::component::ComponentId>)
){
#(#field_get_component_ids)*
}
fn register_required_components(
components: &mut #ecs_path::component::ComponentsRegistrator,
required_components: &mut #ecs_path::component::RequiredComponents
){
#(#field_required_components)*
}
}
// SAFETY:
// - ComponentId is returned in field-definition-order. [from_components] uses field-definition-order
#[allow(deprecated)]
unsafe impl #impl_generics #ecs_path::bundle::BundleFromComponents for #struct_name #ty_generics #where_clause {
#[allow(unused_variables, non_snake_case)]
unsafe fn from_components<__T, __F>(ctx: &mut __T, func: &mut __F) -> Self
where
__F: FnMut(&mut __T) -> #ecs_path::ptr::OwningPtr<'_>
{
Self{
#(#field_from_components)*
}
}
}
#[allow(deprecated)]
impl #impl_generics #ecs_path::bundle::DynamicBundle for #struct_name #ty_generics #where_clause {
type Effect = ();
#[allow(unused_variables)]
#[inline]
fn get_components(
self,
func: &mut impl FnMut(#ecs_path::component::StorageType, #ecs_path::ptr::OwningPtr<'_>)
) {
#(#field_get_components)*
}
}
})
}
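
Editorial note, not part of the vendored file: a hedged usage sketch for the Bundle derive above. Every field must itself be a Bundle (each Component is), and fields marked `#[bundle(ignore)]` are excluded from the component list and filled with `Default::default()` on construction instead.

use bevy_ecs::prelude::*;

#[derive(Component, Default)]
struct Position { x: f32, y: f32 }

#[derive(Component, Default)]
struct Velocity { x: f32, y: f32 }

#[derive(Bundle, Default)]
struct MovingObject {
    position: Position,
    velocity: Velocity,
}

fn spawn_object(mut commands: Commands) {
    commands.spawn(MovingObject::default());
}
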
#[proc_macro_derive(MapEntities, attributes(entities))]
pub fn derive_map_entities(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let ecs_path = bevy_ecs_path();
let map_entities_impl = map_entities(
&ast.data,
Ident::new("self", Span::call_site()),
false,
false,
);
let struct_name = &ast.ident;
let (impl_generics, type_generics, where_clause) = &ast.generics.split_for_impl();
TokenStream::from(quote! {
impl #impl_generics #ecs_path::entity::MapEntities for #struct_name #type_generics #where_clause {
fn map_entities<M: #ecs_path::entity::EntityMapper>(&mut self, mapper: &mut M) {
#map_entities_impl
}
}
})
}
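
Editorial note, not part of the vendored file: a hedged sketch of the MapEntities derive above. It assumes the derive and trait are both reachable at `bevy_ecs::entity::MapEntities` and that `Vec<Entity>` implements `MapEntities`, as in bevy_ecs 0.16; the struct is hypothetical.

use bevy_ecs::entity::{Entity, MapEntities};

// Fields tagged #[entities] are remapped by the generated impl; untagged
// fields are left untouched.
#[derive(MapEntities)]
struct Inventory {
    #[entities]
    items: Vec<Entity>,
    capacity: usize,
}
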
/// Implement `SystemParam` to use a struct as a parameter in a system
#[proc_macro_derive(SystemParam, attributes(system_param))]
pub fn derive_system_param(input: TokenStream) -> TokenStream {
let token_stream = input.clone();
let ast = parse_macro_input!(input as DeriveInput);
let Data::Struct(DataStruct {
fields: field_definitions,
..
}) = ast.data
else {
return syn::Error::new(
ast.span(),
"Invalid `SystemParam` type: expected a `struct`",
)
.into_compile_error()
.into();
};
let path = bevy_ecs_path();
let mut field_locals = Vec::new();
let mut field_names = Vec::new();
let mut fields = Vec::new();
let mut field_types = Vec::new();
let mut field_messages = Vec::new();
for (i, field) in field_definitions.iter().enumerate() {
field_locals.push(format_ident!("f{i}"));
let i = Index::from(i);
let field_value = field
.ident
.as_ref()
.map(|f| quote! { #f })
.unwrap_or_else(|| quote! { #i });
field_names.push(format!("::{}", field_value));
fields.push(field_value);
field_types.push(&field.ty);
let mut field_message = None;
for meta in field
.attrs
.iter()
.filter(|a| a.path().is_ident("system_param"))
{
if let Err(e) = meta.parse_nested_meta(|nested| {
if nested.path.is_ident("validation_message") {
field_message = Some(nested.value()?.parse()?);
Ok(())
} else {
Err(nested.error("Unsupported attribute"))
}
}) {
return e.into_compile_error().into();
}
}
field_messages.push(field_message.unwrap_or_else(|| quote! { err.message }));
}
let generics = ast.generics;
// Emit an error if there's any unrecognized lifetime names.
for lt in generics.lifetimes() {
let ident = &lt.lifetime.ident;
let w = format_ident!("w");
let s = format_ident!("s");
if ident != &w && ident != &s {
return syn::Error::new_spanned(
lt,
r#"invalid lifetime name: expected `'w` or `'s`
'w -- refers to data stored in the World.
's -- refers to data stored in the SystemParam's state.'"#,
)
.into_compile_error()
.into();
}
}
let (_impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let lifetimeless_generics: Vec<_> = generics
.params
.iter()
.filter(|g| !matches!(g, GenericParam::Lifetime(_)))
.collect();
let shadowed_lifetimes: Vec<_> = generics.lifetimes().map(|_| quote!('_)).collect();
let mut punctuated_generics = Punctuated::<_, Comma>::new();
punctuated_generics.extend(lifetimeless_generics.iter().map(|g| match g {
GenericParam::Type(g) => GenericParam::Type(TypeParam {
default: None,
..g.clone()
}),
GenericParam::Const(g) => GenericParam::Const(ConstParam {
default: None,
..g.clone()
}),
_ => unreachable!(),
}));
let mut punctuated_generic_idents = Punctuated::<_, Comma>::new();
punctuated_generic_idents.extend(lifetimeless_generics.iter().map(|g| match g {
GenericParam::Type(g) => &g.ident,
GenericParam::Const(g) => &g.ident,
_ => unreachable!(),
}));
let punctuated_generics_no_bounds: Punctuated<_, Comma> = lifetimeless_generics
.iter()
.map(|&g| match g.clone() {
GenericParam::Type(mut g) => {
g.bounds.clear();
GenericParam::Type(g)
}
g => g,
})
.collect();
let mut tuple_types: Vec<_> = field_types.iter().map(|x| quote! { #x }).collect();
let mut tuple_patterns: Vec<_> = field_locals.iter().map(|x| quote! { #x }).collect();
// If the number of fields exceeds the 16-parameter limit,
// fold the fields into tuples of tuples until we are below the limit.
const LIMIT: usize = 16;
while tuple_types.len() > LIMIT {
let end = Vec::from_iter(tuple_types.drain(..LIMIT));
tuple_types.push(parse_quote!( (#(#end,)*) ));
let end = Vec::from_iter(tuple_patterns.drain(..LIMIT));
tuple_patterns.push(parse_quote!( (#(#end,)*) ));
}
// Create a where clause for the `ReadOnlySystemParam` impl.
// Ensure that each field implements `ReadOnlySystemParam`.
let mut read_only_generics = generics.clone();
let read_only_where_clause = read_only_generics.make_where_clause();
for field_type in &field_types {
read_only_where_clause
.predicates
.push(syn::parse_quote!(#field_type: #path::system::ReadOnlySystemParam));
}
let fields_alias =
ensure_no_collision(format_ident!("__StructFieldsAlias"), token_stream.clone());
let struct_name = &ast.ident;
let state_struct_visibility = &ast.vis;
let state_struct_name = ensure_no_collision(format_ident!("FetchState"), token_stream);
let mut builder_name = None;
for meta in ast
.attrs
.iter()
.filter(|a| a.path().is_ident("system_param"))
{
if let Err(e) = meta.parse_nested_meta(|nested| {
if nested.path.is_ident("builder") {
builder_name = Some(format_ident!("{struct_name}Builder"));
Ok(())
} else {
Err(nested.error("Unsupported attribute"))
}
}) {
return e.into_compile_error().into();
}
}
let builder = builder_name.map(|builder_name| {
let builder_type_parameters: Vec<_> = (0..fields.len()).map(|i| format_ident!("B{i}")).collect();
let builder_doc_comment = format!("A [`SystemParamBuilder`] for a [`{struct_name}`].");
let builder_struct = quote! {
#[doc = #builder_doc_comment]
struct #builder_name<#(#builder_type_parameters,)*> {
#(#fields: #builder_type_parameters,)*
}
};
let lifetimes: Vec<_> = generics.lifetimes().collect();
let generic_struct = quote!{ #struct_name <#(#lifetimes,)* #punctuated_generic_idents> };
let builder_impl = quote!{
// SAFETY: This delegates to the `SystemParamBuilder` for tuples.
unsafe impl<
#(#lifetimes,)*
#(#builder_type_parameters: #path::system::SystemParamBuilder<#field_types>,)*
#punctuated_generics
> #path::system::SystemParamBuilder<#generic_struct> for #builder_name<#(#builder_type_parameters,)*>
#where_clause
{
fn build(self, world: &mut #path::world::World, meta: &mut #path::system::SystemMeta) -> <#generic_struct as #path::system::SystemParam>::State {
let #builder_name { #(#fields: #field_locals,)* } = self;
#state_struct_name {
state: #path::system::SystemParamBuilder::build((#(#tuple_patterns,)*), world, meta)
}
}
}
};
(builder_struct, builder_impl)
});
let (builder_struct, builder_impl) = builder.unzip();
TokenStream::from(quote! {
// We define the FetchState struct in an anonymous scope to avoid polluting the user namespace.
// The struct can still be accessed via SystemParam::State, e.g. EventReaderState can be accessed via
// <EventReader<'static, 'static, T> as SystemParam>::State
const _: () = {
// Allows rebinding the lifetimes of each field type.
type #fields_alias <'w, 's, #punctuated_generics_no_bounds> = (#(#tuple_types,)*);
#[doc(hidden)]
#state_struct_visibility struct #state_struct_name <#(#lifetimeless_generics,)*>
#where_clause {
state: <#fields_alias::<'static, 'static, #punctuated_generic_idents> as #path::system::SystemParam>::State,
}
unsafe impl<#punctuated_generics> #path::system::SystemParam for
#struct_name <#(#shadowed_lifetimes,)* #punctuated_generic_idents> #where_clause
{
type State = #state_struct_name<#punctuated_generic_idents>;
type Item<'w, 's> = #struct_name #ty_generics;
fn init_state(world: &mut #path::world::World, system_meta: &mut #path::system::SystemMeta) -> Self::State {
#state_struct_name {
state: <#fields_alias::<'_, '_, #punctuated_generic_idents> as #path::system::SystemParam>::init_state(world, system_meta),
}
}
unsafe fn new_archetype(state: &mut Self::State, archetype: &#path::archetype::Archetype, system_meta: &mut #path::system::SystemMeta) {
// SAFETY: The caller ensures that `archetype` is from the World the state was initialized from in `init_state`.
unsafe { <#fields_alias::<'_, '_, #punctuated_generic_idents> as #path::system::SystemParam>::new_archetype(&mut state.state, archetype, system_meta) }
}
fn apply(state: &mut Self::State, system_meta: &#path::system::SystemMeta, world: &mut #path::world::World) {
<#fields_alias::<'_, '_, #punctuated_generic_idents> as #path::system::SystemParam>::apply(&mut state.state, system_meta, world);
}
fn queue(state: &mut Self::State, system_meta: &#path::system::SystemMeta, world: #path::world::DeferredWorld) {
<#fields_alias::<'_, '_, #punctuated_generic_idents> as #path::system::SystemParam>::queue(&mut state.state, system_meta, world);
}
#[inline]
unsafe fn validate_param<'w, 's>(
state: &'s Self::State,
_system_meta: &#path::system::SystemMeta,
_world: #path::world::unsafe_world_cell::UnsafeWorldCell<'w>,
) -> Result<(), #path::system::SystemParamValidationError> {
let #state_struct_name { state: (#(#tuple_patterns,)*) } = state;
#(
<#field_types as #path::system::SystemParam>::validate_param(#field_locals, _system_meta, _world)
.map_err(|err| #path::system::SystemParamValidationError::new::<Self>(err.skipped, #field_messages, #field_names))?;
)*
Result::Ok(())
}
#[inline]
unsafe fn get_param<'w, 's>(
state: &'s mut Self::State,
system_meta: &#path::system::SystemMeta,
world: #path::world::unsafe_world_cell::UnsafeWorldCell<'w>,
change_tick: #path::component::Tick,
) -> Self::Item<'w, 's> {
let (#(#tuple_patterns,)*) = <
(#(#tuple_types,)*) as #path::system::SystemParam
>::get_param(&mut state.state, system_meta, world, change_tick);
#struct_name {
#(#fields: #field_locals,)*
}
}
}
// Safety: Each field is `ReadOnlySystemParam`, so this can only read from the `World`
unsafe impl<'w, 's, #punctuated_generics> #path::system::ReadOnlySystemParam for #struct_name #ty_generics #read_only_where_clause {}
#builder_impl
};
#builder_struct
})
}
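// A hedged usage sketch of the derive above, as seen from a crate that consumes the
// `bevy_ecs` re-export; `Enemy` and `SpawnCount` are hypothetical types, not part of
// this crate:
//
//     #[derive(SystemParam)]
//     struct EnemyCounter<'w, 's> {
//         enemies: Query<'w, 's, &'static Enemy>,
//         spawn_count: Res<'w, SpawnCount>,
//     }
//
// Adding `#[system_param(builder)]` additionally emits an `EnemyCounterBuilder` struct
// with one field per parameter field, implementing `SystemParamBuilder`.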
/// Implement `QueryData` to use a struct as a data parameter in a query
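///
/// A hedged usage sketch from the consuming crate's side; `Health` and `Name` are
/// hypothetical components:
///
/// ```ignore
/// #[derive(QueryData)]
/// #[query_data(mutable)]
/// struct HealthQuery {
///     health: &'static mut Health,
///     name: Option<&'static Name>,
/// }
/// ```
///
/// Without `#[query_data(mutable)]`, every field must itself be read-only query data.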
#[proc_macro_derive(QueryData, attributes(query_data))]
pub fn derive_query_data(input: TokenStream) -> TokenStream {
derive_query_data_impl(input)
}
/// Implement `QueryFilter` to use a struct as a filter parameter in a query
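///
/// A hedged usage sketch; `Enemy` and `Dead` are hypothetical components:
///
/// ```ignore
/// #[derive(QueryFilter)]
/// struct EnemyFilter {
///     marker: With<Enemy>,
///     alive: Without<Dead>,
/// }
/// ```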
#[proc_macro_derive(QueryFilter, attributes(query_filter))]
pub fn derive_query_filter(input: TokenStream) -> TokenStream {
derive_query_filter_impl(input)
}
/// Derive macro generating an impl of the trait `ScheduleLabel`.
///
/// This does not work for unions.
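///
/// A hedged usage sketch; label types conventionally also derive `Debug`, `Clone`,
/// `PartialEq`, `Eq` and `Hash` to satisfy the trait's bounds:
///
/// ```ignore
/// #[derive(ScheduleLabel, Debug, Clone, PartialEq, Eq, Hash)]
/// struct PrePhysics;
/// ```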
#[proc_macro_derive(ScheduleLabel)]
pub fn derive_schedule_label(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let mut trait_path = bevy_ecs_path();
trait_path.segments.push(format_ident!("schedule").into());
let mut dyn_eq_path = trait_path.clone();
trait_path
.segments
.push(format_ident!("ScheduleLabel").into());
dyn_eq_path.segments.push(format_ident!("DynEq").into());
derive_label(input, "ScheduleLabel", &trait_path, &dyn_eq_path)
}
/// Derive macro generating an impl of the trait `SystemSet`.
///
/// This does not work for unions.
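///
/// A hedged usage sketch, mirroring the `ScheduleLabel` example above:
///
/// ```ignore
/// #[derive(SystemSet, Debug, Clone, PartialEq, Eq, Hash)]
/// enum GameplaySet {
///     Input,
///     Movement,
/// }
/// ```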
#[proc_macro_derive(SystemSet)]
pub fn derive_system_set(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
let mut trait_path = bevy_ecs_path();
trait_path.segments.push(format_ident!("schedule").into());
let mut dyn_eq_path = trait_path.clone();
trait_path.segments.push(format_ident!("SystemSet").into());
dyn_eq_path.segments.push(format_ident!("DynEq").into());
derive_label(input, "SystemSet", &trait_path, &dyn_eq_path)
}
pub(crate) fn bevy_ecs_path() -> syn::Path {
BevyManifest::shared().get_path("bevy_ecs")
}
#[proc_macro_derive(Event, attributes(event))]
pub fn derive_event(input: TokenStream) -> TokenStream {
component::derive_event(input)
}
#[proc_macro_derive(Resource)]
pub fn derive_resource(input: TokenStream) -> TokenStream {
component::derive_resource(input)
}
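/// Derive macro generating an impl of the trait `Component`.
///
/// A hedged usage sketch of the `require` attribute; `Player` and `Transform` are
/// assumptions, not defined in this crate:
///
/// ```ignore
/// #[derive(Component)]
/// #[require(Transform)]
/// struct Player;
/// ```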
#[proc_macro_derive(
Component,
attributes(component, require, relationship, relationship_target, entities)
)]
pub fn derive_component(input: TokenStream) -> TokenStream {
component::derive_component(input)
}
#[proc_macro_derive(States)]
pub fn derive_states(input: TokenStream) -> TokenStream {
states::derive_states(input)
}
#[proc_macro_derive(SubStates, attributes(source))]
pub fn derive_substates(input: TokenStream) -> TokenStream {
states::derive_substates(input)
}
#[proc_macro_derive(FromWorld, attributes(from_world))]
pub fn derive_from_world(input: TokenStream) -> TokenStream {
let bevy_ecs_path = bevy_ecs_path();
let ast = parse_macro_input!(input as DeriveInput);
let name = ast.ident;
let (impl_generics, ty_generics, where_clauses) = ast.generics.split_for_impl();
let (fields, variant_ident) = match &ast.data {
Data::Struct(data) => (&data.fields, None),
Data::Enum(data) => {
match data.variants.iter().find(|variant| {
variant
.attrs
.iter()
.any(|attr| attr.path().is_ident("from_world"))
}) {
Some(variant) => (&variant.fields, Some(&variant.ident)),
None => {
return syn::Error::new(
Span::call_site(),
"No variant found with the `#[from_world]` attribute",
)
.into_compile_error()
.into();
}
}
}
Data::Union(_) => {
return syn::Error::new(
Span::call_site(),
"#[derive(FromWorld)]` does not support unions",
)
.into_compile_error()
.into();
}
};
let field_init_expr = quote!(#bevy_ecs_path::world::FromWorld::from_world(world));
let members = fields.members();
let field_initializers = match variant_ident {
Some(variant_ident) => quote!( Self::#variant_ident {
#(#members: #field_init_expr),*
}),
None => quote!( Self {
#(#members: #field_init_expr),*
}),
};
TokenStream::from(quote! {
impl #impl_generics #bevy_ecs_path::world::FromWorld for #name #ty_generics #where_clauses {
fn from_world(world: &mut #bevy_ecs_path::world::World) -> Self {
#field_initializers
}
}
})
}
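// A hedged usage sketch for the `FromWorld` derive above. For an enum, one variant is
// selected with `#[from_world]`; every field of the selected variant (or of the struct)
// is initialized via `FromWorld::from_world`. `RandomSeed` is a hypothetical type:
//
//     #[derive(FromWorld)]
//     enum Difficulty {
//         #[from_world]
//         Custom { seed: RandomSeed },
//         Easy,
//     }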

442
vendor/bevy_ecs_macros/src/query_data.rs vendored Normal file
View File

@@ -0,0 +1,442 @@
use bevy_macro_utils::ensure_no_collision;
use proc_macro::TokenStream;
use proc_macro2::{Ident, Span};
use quote::{format_ident, quote};
use syn::{
parse_macro_input, parse_quote, punctuated::Punctuated, token, token::Comma, Attribute, Data,
DataStruct, DeriveInput, Field, Index, Meta,
};
use crate::{
bevy_ecs_path,
world_query::{item_struct, world_query_impl},
};
#[derive(Default)]
struct QueryDataAttributes {
pub is_mutable: bool,
pub derive_args: Punctuated<Meta, Comma>,
}
static MUTABLE_ATTRIBUTE_NAME: &str = "mutable";
static DERIVE_ATTRIBUTE_NAME: &str = "derive";
mod field_attr_keywords {
syn::custom_keyword!(ignore);
}
pub static QUERY_DATA_ATTRIBUTE_NAME: &str = "query_data";
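// For a hypothetical `#[derive(QueryData)] struct Foo { .. }`, the function below
// generates the following companion types (names are collision-checked against the
// input token stream where noted):
//   - `FooItem`: the item struct yielded when iterating the query
//   - `FooFetch`: the internal `WorldQuery::Fetch` type (collision-checked)
//   - `FooState`: the internal `WorldQuery::State` type (collision-checked)
//   - `FooReadOnly`, `FooReadOnlyItem`, `FooReadOnlyFetch`: only when `#[query_data(mutable)]` is set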
pub fn derive_query_data_impl(input: TokenStream) -> TokenStream {
let tokens = input.clone();
let ast = parse_macro_input!(input as DeriveInput);
let visibility = ast.vis;
let mut attributes = QueryDataAttributes::default();
for attr in &ast.attrs {
if attr
.path()
.get_ident()
.is_none_or(|ident| ident != QUERY_DATA_ATTRIBUTE_NAME)
{
continue;
}
let result = attr.parse_nested_meta(|meta| {
if meta.path.is_ident(MUTABLE_ATTRIBUTE_NAME) {
attributes.is_mutable = true;
if meta.input.peek(token::Paren) {
Err(meta.error(format_args!("`{MUTABLE_ATTRIBUTE_NAME}` does not take any arguments")))
} else {
Ok(())
}
} else if meta.path.is_ident(DERIVE_ATTRIBUTE_NAME) {
meta.parse_nested_meta(|meta| {
attributes.derive_args.push(Meta::Path(meta.path));
Ok(())
}).map_err(|_| {
meta.error(format_args!("`{DERIVE_ATTRIBUTE_NAME}` requires at least one argument"))
})
} else {
Err(meta.error(format_args!("invalid attribute, expected `{MUTABLE_ATTRIBUTE_NAME}` or `{DERIVE_ATTRIBUTE_NAME}`")))
}
});
if let Err(err) = result {
return err.to_compile_error().into();
}
}
let path = bevy_ecs_path();
let user_generics = ast.generics.clone();
let (user_impl_generics, user_ty_generics, user_where_clauses) = user_generics.split_for_impl();
let user_generics_with_world = {
let mut generics = ast.generics;
generics.params.insert(0, parse_quote!('__w));
generics
};
let (user_impl_generics_with_world, user_ty_generics_with_world, user_where_clauses_with_world) =
user_generics_with_world.split_for_impl();
let struct_name = ast.ident;
let read_only_struct_name = if attributes.is_mutable {
Ident::new(&format!("{struct_name}ReadOnly"), Span::call_site())
} else {
struct_name.clone()
};
let item_struct_name = Ident::new(&format!("{struct_name}Item"), Span::call_site());
let read_only_item_struct_name = if attributes.is_mutable {
Ident::new(&format!("{struct_name}ReadOnlyItem"), Span::call_site())
} else {
item_struct_name.clone()
};
let fetch_struct_name = Ident::new(&format!("{struct_name}Fetch"), Span::call_site());
let fetch_struct_name = ensure_no_collision(fetch_struct_name, tokens.clone());
let read_only_fetch_struct_name = if attributes.is_mutable {
let new_ident = Ident::new(&format!("{struct_name}ReadOnlyFetch"), Span::call_site());
ensure_no_collision(new_ident, tokens.clone())
} else {
fetch_struct_name.clone()
};
let marker_name =
ensure_no_collision(format_ident!("_world_query_derive_marker"), tokens.clone());
// Generate a name for the state struct that doesn't conflict
// with the struct definition.
let state_struct_name = Ident::new(&format!("{struct_name}State"), Span::call_site());
let state_struct_name = ensure_no_collision(state_struct_name, tokens);
let Data::Struct(DataStruct { fields, .. }) = &ast.data else {
return syn::Error::new(
Span::call_site(),
"#[derive(QueryData)]` only supports structs",
)
.into_compile_error()
.into();
};
let mut field_attrs = Vec::new();
let mut field_visibilities = Vec::new();
let mut field_idents = Vec::new();
let mut named_field_idents = Vec::new();
let mut field_types = Vec::new();
let mut read_only_field_types = Vec::new();
for (i, field) in fields.iter().enumerate() {
let attrs = match read_world_query_field_info(field) {
Ok(QueryDataFieldInfo { attrs }) => attrs,
Err(e) => return e.into_compile_error().into(),
};
let named_field_ident = field
.ident
.as_ref()
.cloned()
.unwrap_or_else(|| format_ident!("f{i}"));
let i = Index::from(i);
let field_ident = field
.ident
.as_ref()
.map_or(quote! { #i }, |i| quote! { #i });
field_idents.push(field_ident);
named_field_idents.push(named_field_ident);
field_attrs.push(attrs);
field_visibilities.push(field.vis.clone());
let field_ty = field.ty.clone();
field_types.push(quote!(#field_ty));
read_only_field_types.push(quote!(<#field_ty as #path::query::QueryData>::ReadOnly));
}
let derive_args = &attributes.derive_args;
// `#[derive()]` is valid syntax
let derive_macro_call = quote! { #[derive(#derive_args)] };
let mutable_item_struct = item_struct(
&path,
fields,
&derive_macro_call,
&struct_name,
&visibility,
&item_struct_name,
&field_types,
&user_impl_generics_with_world,
&field_attrs,
&field_visibilities,
&field_idents,
&user_ty_generics,
&user_ty_generics_with_world,
user_where_clauses_with_world,
);
let mutable_world_query_impl = world_query_impl(
&path,
&struct_name,
&visibility,
&fetch_struct_name,
&field_types,
&user_impl_generics,
&user_impl_generics_with_world,
&user_ty_generics,
&user_ty_generics_with_world,
&named_field_idents,
&marker_name,
&state_struct_name,
user_where_clauses,
user_where_clauses_with_world,
);
let (read_only_struct, read_only_impl) = if attributes.is_mutable {
// If the query is mutable, we need to generate a separate readonly version of some things
let readonly_item_struct = item_struct(
&path,
fields,
&derive_macro_call,
&read_only_struct_name,
&visibility,
&read_only_item_struct_name,
&read_only_field_types,
&user_impl_generics_with_world,
&field_attrs,
&field_visibilities,
&field_idents,
&user_ty_generics,
&user_ty_generics_with_world,
user_where_clauses_with_world,
);
let readonly_world_query_impl = world_query_impl(
&path,
&read_only_struct_name,
&visibility,
&read_only_fetch_struct_name,
&read_only_field_types,
&user_impl_generics,
&user_impl_generics_with_world,
&user_ty_generics,
&user_ty_generics_with_world,
&named_field_idents,
&marker_name,
&state_struct_name,
user_where_clauses,
user_where_clauses_with_world,
);
let read_only_structs = quote! {
#[doc = concat!(
"Automatically generated [`WorldQuery`](",
stringify!(#path),
"::query::WorldQuery) type for a read-only variant of [`",
stringify!(#struct_name),
"`]."
)]
#[automatically_derived]
#visibility struct #read_only_struct_name #user_impl_generics #user_where_clauses {
#(
#[doc = "Automatically generated read-only field for accessing `"]
#[doc = stringify!(#field_types)]
#[doc = "`."]
#field_visibilities #named_field_idents: #read_only_field_types,
)*
}
#readonly_item_struct
};
(read_only_structs, readonly_world_query_impl)
} else {
(quote! {}, quote! {})
};
let data_impl = {
let read_only_data_impl = if attributes.is_mutable {
quote! {
/// SAFETY: we assert fields are readonly below
unsafe impl #user_impl_generics #path::query::QueryData
for #read_only_struct_name #user_ty_generics #user_where_clauses {
const IS_READ_ONLY: bool = true;
type ReadOnly = #read_only_struct_name #user_ty_generics;
type Item<'__w> = #read_only_item_struct_name #user_ty_generics_with_world;
fn shrink<'__wlong: '__wshort, '__wshort>(
item: Self::Item<'__wlong>
) -> Self::Item<'__wshort> {
#read_only_item_struct_name {
#(
#field_idents: <#read_only_field_types>::shrink(item.#field_idents),
)*
}
}
/// SAFETY: we call `fetch` for each member that implements `Fetch`.
#[inline(always)]
unsafe fn fetch<'__w>(
_fetch: &mut <Self as #path::query::WorldQuery>::Fetch<'__w>,
_entity: #path::entity::Entity,
_table_row: #path::storage::TableRow,
) -> Self::Item<'__w> {
Self::Item {
#(#field_idents: <#read_only_field_types>::fetch(&mut _fetch.#named_field_idents, _entity, _table_row),)*
}
}
}
}
} else {
quote! {}
};
let is_read_only = !attributes.is_mutable;
quote! {
/// SAFETY: we assert fields are readonly below
unsafe impl #user_impl_generics #path::query::QueryData
for #struct_name #user_ty_generics #user_where_clauses {
const IS_READ_ONLY: bool = #is_read_only;
type ReadOnly = #read_only_struct_name #user_ty_generics;
type Item<'__w> = #item_struct_name #user_ty_generics_with_world;
fn shrink<'__wlong: '__wshort, '__wshort>(
item: Self::Item<'__wlong>
) -> Self::Item<'__wshort> {
#item_struct_name {
#(
#field_idents: <#field_types>::shrink(item.#field_idents),
)*
}
}
/// SAFETY: we call `fetch` for each member that implements `Fetch`.
#[inline(always)]
unsafe fn fetch<'__w>(
_fetch: &mut <Self as #path::query::WorldQuery>::Fetch<'__w>,
_entity: #path::entity::Entity,
_table_row: #path::storage::TableRow,
) -> Self::Item<'__w> {
Self::Item {
#(#field_idents: <#field_types>::fetch(&mut _fetch.#named_field_idents, _entity, _table_row),)*
}
}
}
#read_only_data_impl
}
};
let read_only_data_impl = quote! {
/// SAFETY: we assert fields are readonly below
unsafe impl #user_impl_generics #path::query::ReadOnlyQueryData
for #read_only_struct_name #user_ty_generics #user_where_clauses {}
};
let read_only_asserts = if attributes.is_mutable {
quote! {
// Double-check that the data fetched by `<_ as QueryData>::ReadOnly` is read-only.
// This is technically unnecessary, as `<_ as QueryData>::ReadOnly: ReadOnlyQueryData` already holds,
// but to protect against future mistakes we assert that the associated type implements `ReadOnlyQueryData` anyway.
#( assert_readonly::<#read_only_field_types>(); )*
}
} else {
quote! {
// Statically check that the safety guarantee of `ReadOnlyQueryData` actually holds for the derived struct.
// We need this so that the `ReadOnlyQueryData` impl fails to compile if the struct contains nested `QueryData`
// members that don't implement it, e.g.:
// ```
// #[derive(QueryData)]
// pub struct Foo { a: &'static mut MyComponent }
// ```
#( assert_readonly::<#field_types>(); )*
}
};
let data_asserts = quote! {
#( assert_data::<#field_types>(); )*
};
TokenStream::from(quote! {
#mutable_item_struct
#read_only_struct
const _: () = {
#[doc(hidden)]
#[doc = concat!(
"Automatically generated internal [`WorldQuery`](",
stringify!(#path),
"::query::WorldQuery) state type for [`",
stringify!(#struct_name),
"`], used for caching."
)]
#[automatically_derived]
#visibility struct #state_struct_name #user_impl_generics #user_where_clauses {
#(#named_field_idents: <#field_types as #path::query::WorldQuery>::State,)*
}
#mutable_world_query_impl
#read_only_impl
#data_impl
#read_only_data_impl
};
#[allow(dead_code)]
const _: () = {
fn assert_readonly<T>()
where
T: #path::query::ReadOnlyQueryData,
{
}
fn assert_data<T>()
where
T: #path::query::QueryData,
{
}
// We generate a readonly assertion for every struct member.
fn assert_all #user_impl_generics_with_world () #user_where_clauses_with_world {
#read_only_asserts
#data_asserts
}
};
// The original struct will most likely be left unused. As we don't want our users having
// to specify `#[allow(dead_code)]` for their custom queries, we are using this cursed
// workaround.
#[allow(dead_code)]
const _: () = {
fn dead_code_workaround #user_impl_generics (
q: #struct_name #user_ty_generics,
q2: #read_only_struct_name #user_ty_generics
) #user_where_clauses {
#(q.#field_idents;)*
#(q2.#field_idents;)*
}
};
})
}
struct QueryDataFieldInfo {
/// All field attributes except for `query_data` ones.
attrs: Vec<Attribute>,
}
fn read_world_query_field_info(field: &Field) -> syn::Result<QueryDataFieldInfo> {
let mut attrs = Vec::new();
for attr in &field.attrs {
if attr
.path()
.get_ident()
.is_some_and(|ident| ident == QUERY_DATA_ATTRIBUTE_NAME)
{
return Err(syn::Error::new_spanned(
attr,
"#[derive(QueryData)] does not support field attributes.",
));
}
attrs.push(attr.clone());
}
Ok(QueryDataFieldInfo { attrs })
}

167
vendor/bevy_ecs_macros/src/query_filter.rs vendored Normal file
View File

@@ -0,0 +1,167 @@
use bevy_macro_utils::ensure_no_collision;
use proc_macro::TokenStream;
use proc_macro2::{Ident, Span};
use quote::{format_ident, quote};
use syn::{parse_macro_input, parse_quote, Data, DataStruct, DeriveInput, Index};
use crate::{bevy_ecs_path, world_query::world_query_impl};
mod field_attr_keywords {
syn::custom_keyword!(ignore);
}
pub fn derive_query_filter_impl(input: TokenStream) -> TokenStream {
let tokens = input.clone();
let ast = parse_macro_input!(input as DeriveInput);
let visibility = ast.vis;
let path = bevy_ecs_path();
let user_generics = ast.generics.clone();
let (user_impl_generics, user_ty_generics, user_where_clauses) = user_generics.split_for_impl();
let user_generics_with_world = {
let mut generics = ast.generics;
generics.params.insert(0, parse_quote!('__w));
generics
};
let (user_impl_generics_with_world, user_ty_generics_with_world, user_where_clauses_with_world) =
user_generics_with_world.split_for_impl();
let struct_name = ast.ident;
let fetch_struct_name = Ident::new(&format!("{struct_name}Fetch"), Span::call_site());
let fetch_struct_name = ensure_no_collision(fetch_struct_name, tokens.clone());
let marker_name =
ensure_no_collision(format_ident!("_world_query_derive_marker"), tokens.clone());
// Generate a name for the state struct that doesn't conflict
// with the struct definition.
let state_struct_name = Ident::new(&format!("{struct_name}State"), Span::call_site());
let state_struct_name = ensure_no_collision(state_struct_name, tokens);
let Data::Struct(DataStruct { fields, .. }) = &ast.data else {
return syn::Error::new(
Span::call_site(),
"#[derive(WorldQuery)]` only supports structs",
)
.into_compile_error()
.into();
};
let mut field_attrs = Vec::new();
let mut field_visibilities = Vec::new();
let mut field_idents = Vec::new();
let mut named_field_idents = Vec::new();
let mut field_types = Vec::new();
for (i, field) in fields.iter().enumerate() {
let attrs = field.attrs.clone();
let named_field_ident = field
.ident
.as_ref()
.cloned()
.unwrap_or_else(|| format_ident!("f{i}"));
let i = Index::from(i);
let field_ident = field
.ident
.as_ref()
.map_or(quote! { #i }, |i| quote! { #i });
field_idents.push(field_ident);
named_field_idents.push(named_field_ident);
field_attrs.push(attrs);
field_visibilities.push(field.vis.clone());
let field_ty = field.ty.clone();
field_types.push(quote!(#field_ty));
}
let world_query_impl = world_query_impl(
&path,
&struct_name,
&visibility,
&fetch_struct_name,
&field_types,
&user_impl_generics,
&user_impl_generics_with_world,
&user_ty_generics,
&user_ty_generics_with_world,
&named_field_idents,
&marker_name,
&state_struct_name,
user_where_clauses,
user_where_clauses_with_world,
);
let filter_impl = quote! {
// SAFETY: This only performs access that subqueries perform, and they impl `QueryFilter` and so perform no mutable access.
unsafe impl #user_impl_generics #path::query::QueryFilter
for #struct_name #user_ty_generics #user_where_clauses {
const IS_ARCHETYPAL: bool = true #(&& <#field_types>::IS_ARCHETYPAL)*;
#[allow(unused_variables)]
#[inline(always)]
unsafe fn filter_fetch<'__w>(
_fetch: &mut <Self as #path::query::WorldQuery>::Fetch<'__w>,
_entity: #path::entity::Entity,
_table_row: #path::storage::TableRow,
) -> bool {
true #(&& <#field_types>::filter_fetch(&mut _fetch.#named_field_idents, _entity, _table_row))*
}
}
};
let filter_asserts = quote! {
#( assert_filter::<#field_types>(); )*
};
TokenStream::from(quote! {
const _: () = {
#[doc(hidden)]
#[doc = concat!(
"Automatically generated internal [`WorldQuery`](",
stringify!(#path),
"::query::WorldQuery) state type for [`",
stringify!(#struct_name),
"`], used for caching."
)]
#[automatically_derived]
#visibility struct #state_struct_name #user_impl_generics #user_where_clauses {
#(#named_field_idents: <#field_types as #path::query::WorldQuery>::State,)*
}
#world_query_impl
#filter_impl
};
#[allow(dead_code)]
const _: () = {
fn assert_filter<T>()
where
T: #path::query::QueryFilter,
{
}
// We generate a filter assertion for every struct member.
fn assert_all #user_impl_generics_with_world () #user_where_clauses_with_world {
#filter_asserts
}
};
// The original struct will most likely be left unused. As we don't want our users having
// to specify `#[allow(dead_code)]` for their custom queries, we are using this cursed
// workaround.
#[allow(dead_code)]
const _: () = {
fn dead_code_workaround #user_impl_generics (
q: #struct_name #user_ty_generics,
q2: #struct_name #user_ty_generics
) #user_where_clauses {
#(q.#field_idents;)*
#(q2.#field_idents;)*
}
};
})
}

144
vendor/bevy_ecs_macros/src/states.rs vendored Normal file
View File

@@ -0,0 +1,144 @@
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{parse_macro_input, spanned::Spanned, DeriveInput, Pat, Path, Result};
use crate::bevy_ecs_path;
pub fn derive_states(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let generics = ast.generics;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let mut base_trait_path = bevy_ecs_path();
base_trait_path
.segments
.push(format_ident!("schedule").into());
let mut trait_path = base_trait_path.clone();
trait_path.segments.push(format_ident!("States").into());
let mut state_mutation_trait_path = base_trait_path.clone();
state_mutation_trait_path
.segments
.push(format_ident!("FreelyMutableState").into());
let struct_name = &ast.ident;
quote! {
impl #impl_generics #trait_path for #struct_name #ty_generics #where_clause {}
impl #impl_generics #state_mutation_trait_path for #struct_name #ty_generics #where_clause {
}
}
.into()
}
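// A hedged usage sketch for the `States` derive above. The generated impls are empty,
// so the type must supply the `States` supertraits (`Debug`, `Clone`, `PartialEq`,
// `Eq`, `Hash`) through its own derives; `Default` is included here for the
// `#[default]` variant marker. `AppState` is a hypothetical type:
//
//     #[derive(States, Debug, Clone, PartialEq, Eq, Hash, Default)]
//     enum AppState {
//         #[default]
//         Menu,
//         InGame,
//     }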
struct Source {
source_type: Path,
source_value: Pat,
}
fn parse_sources_attr(ast: &DeriveInput) -> Result<Source> {
let mut result = ast
.attrs
.iter()
.filter(|a| a.path().is_ident("source"))
.map(|meta| {
let mut source = None;
let value = meta.parse_nested_meta(|nested| {
let source_type = nested.path.clone();
let source_value = Pat::parse_multi(nested.value()?)?;
source = Some(Source {
source_type,
source_value,
});
Ok(())
});
match source {
Some(value) => Ok(value),
None => match value {
Ok(_) => Err(syn::Error::new(
ast.span(),
"Couldn't parse SubStates source",
)),
Err(e) => Err(e),
},
}
})
.collect::<Result<Vec<_>>>()?;
if result.len() > 1 {
return Err(syn::Error::new(
ast.span(),
"Only one source is allowed for SubStates",
));
}
let Some(result) = result.pop() else {
return Err(syn::Error::new(ast.span(), "SubStates require a source"));
};
Ok(result)
}
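// A hedged sketch of the attribute shape parsed above: exactly one
// `#[source(SourceType = pattern)]` on the derived type, where `pattern` selects the
// source-state values for which this sub-state exists. `AppState` is a hypothetical
// source state:
//
//     #[derive(SubStates, Debug, Clone, PartialEq, Eq, Hash, Default)]
//     #[source(AppState = AppState::InGame)]
//     enum GamePhase {
//         #[default]
//         Setup,
//         Playing,
//     }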
pub fn derive_substates(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as DeriveInput);
let sources = parse_sources_attr(&ast).expect("Failed to parse substate sources");
let generics = ast.generics;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let mut base_trait_path = bevy_ecs_path();
base_trait_path
.segments
.push(format_ident!("schedule").into());
let mut trait_path = base_trait_path.clone();
trait_path.segments.push(format_ident!("SubStates").into());
let mut state_set_trait_path = base_trait_path.clone();
state_set_trait_path
.segments
.push(format_ident!("StateSet").into());
let mut state_trait_path = base_trait_path.clone();
state_trait_path
.segments
.push(format_ident!("States").into());
let mut state_mutation_trait_path = base_trait_path.clone();
state_mutation_trait_path
.segments
.push(format_ident!("FreelyMutableState").into());
let struct_name = &ast.ident;
let source_state_type = sources.source_type;
let source_state_value = sources.source_value;
let result = quote! {
impl #impl_generics #trait_path for #struct_name #ty_generics #where_clause {
type SourceStates = #source_state_type;
fn should_exist(sources: #source_state_type) -> Option<Self> {
if matches!(sources, #source_state_value) {
Some(Self::default())
} else {
None
}
}
}
impl #impl_generics #state_trait_path for #struct_name #ty_generics #where_clause {
const DEPENDENCY_DEPTH : usize = <Self as #trait_path>::SourceStates::SET_DEPENDENCY_DEPTH + 1;
}
impl #impl_generics #state_mutation_trait_path for #struct_name #ty_generics #where_clause {
}
};
// panic!("Got Result\n{}", result.to_string());
result.into()
}

176
vendor/bevy_ecs_macros/src/world_query.rs vendored Normal file
View File

@@ -0,0 +1,176 @@
use proc_macro2::Ident;
use quote::quote;
use syn::{Attribute, Fields, ImplGenerics, TypeGenerics, Visibility, WhereClause};
pub(crate) fn item_struct(
path: &syn::Path,
fields: &Fields,
derive_macro_call: &proc_macro2::TokenStream,
struct_name: &Ident,
visibility: &Visibility,
item_struct_name: &Ident,
field_types: &Vec<proc_macro2::TokenStream>,
user_impl_generics_with_world: &ImplGenerics,
field_attrs: &Vec<Vec<Attribute>>,
field_visibilities: &Vec<Visibility>,
field_idents: &Vec<proc_macro2::TokenStream>,
user_ty_generics: &TypeGenerics,
user_ty_generics_with_world: &TypeGenerics,
user_where_clauses_with_world: Option<&WhereClause>,
) -> proc_macro2::TokenStream {
let item_attrs = quote! {
#[doc = concat!(
"Automatically generated [`WorldQuery`](",
stringify!(#path),
"::query::WorldQuery) item type for [`",
stringify!(#struct_name),
"`], returned when iterating over query results."
)]
#[automatically_derived]
};
match fields {
Fields::Named(_) => quote! {
#derive_macro_call
#item_attrs
#visibility struct #item_struct_name #user_impl_generics_with_world #user_where_clauses_with_world {
#(#(#field_attrs)* #field_visibilities #field_idents: <#field_types as #path::query::QueryData>::Item<'__w>,)*
}
},
Fields::Unnamed(_) => quote! {
#derive_macro_call
#item_attrs
#visibility struct #item_struct_name #user_impl_generics_with_world #user_where_clauses_with_world(
#( #field_visibilities <#field_types as #path::query::QueryData>::Item<'__w>, )*
);
},
Fields::Unit => quote! {
#item_attrs
#visibility type #item_struct_name #user_ty_generics_with_world = #struct_name #user_ty_generics;
},
}
}
pub(crate) fn world_query_impl(
path: &syn::Path,
struct_name: &Ident,
visibility: &Visibility,
fetch_struct_name: &Ident,
field_types: &Vec<proc_macro2::TokenStream>,
user_impl_generics: &ImplGenerics,
user_impl_generics_with_world: &ImplGenerics,
user_ty_generics: &TypeGenerics,
user_ty_generics_with_world: &TypeGenerics,
named_field_idents: &Vec<Ident>,
marker_name: &Ident,
state_struct_name: &Ident,
user_where_clauses: Option<&WhereClause>,
user_where_clauses_with_world: Option<&WhereClause>,
) -> proc_macro2::TokenStream {
quote! {
#[doc(hidden)]
#[doc = concat!(
"Automatically generated internal [`WorldQuery`](",
stringify!(#path),
"::query::WorldQuery) fetch type for [`",
stringify!(#struct_name),
"`], used to define the world data accessed by this query."
)]
#[automatically_derived]
#visibility struct #fetch_struct_name #user_impl_generics_with_world #user_where_clauses_with_world {
#(#named_field_idents: <#field_types as #path::query::WorldQuery>::Fetch<'__w>,)*
#marker_name: &'__w (),
}
impl #user_impl_generics_with_world Clone for #fetch_struct_name #user_ty_generics_with_world
#user_where_clauses_with_world {
fn clone(&self) -> Self {
Self {
#(#named_field_idents: self.#named_field_idents.clone(),)*
#marker_name: &(),
}
}
}
// SAFETY: `update_component_access` and `update_archetype_component_access` are called on every field
unsafe impl #user_impl_generics #path::query::WorldQuery
for #struct_name #user_ty_generics #user_where_clauses {
type Fetch<'__w> = #fetch_struct_name #user_ty_generics_with_world;
type State = #state_struct_name #user_ty_generics;
fn shrink_fetch<'__wlong: '__wshort, '__wshort>(
fetch: <#struct_name #user_ty_generics as #path::query::WorldQuery>::Fetch<'__wlong>
) -> <#struct_name #user_ty_generics as #path::query::WorldQuery>::Fetch<'__wshort> {
#fetch_struct_name {
#(
#named_field_idents: <#field_types>::shrink_fetch(fetch.#named_field_idents),
)*
#marker_name: &(),
}
}
unsafe fn init_fetch<'__w>(
_world: #path::world::unsafe_world_cell::UnsafeWorldCell<'__w>,
state: &Self::State,
_last_run: #path::component::Tick,
_this_run: #path::component::Tick,
) -> <Self as #path::query::WorldQuery>::Fetch<'__w> {
#fetch_struct_name {
#(#named_field_idents:
<#field_types>::init_fetch(
_world,
&state.#named_field_idents,
_last_run,
_this_run,
),
)*
#marker_name: &(),
}
}
const IS_DENSE: bool = true #(&& <#field_types>::IS_DENSE)*;
/// SAFETY: we call `set_archetype` for each member that implements `Fetch`
#[inline]
unsafe fn set_archetype<'__w>(
_fetch: &mut <Self as #path::query::WorldQuery>::Fetch<'__w>,
_state: &Self::State,
_archetype: &'__w #path::archetype::Archetype,
_table: &'__w #path::storage::Table
) {
#(<#field_types>::set_archetype(&mut _fetch.#named_field_idents, &_state.#named_field_idents, _archetype, _table);)*
}
/// SAFETY: we call `set_table` for each member that implements `Fetch`
#[inline]
unsafe fn set_table<'__w>(
_fetch: &mut <Self as #path::query::WorldQuery>::Fetch<'__w>,
_state: &Self::State,
_table: &'__w #path::storage::Table
) {
#(<#field_types>::set_table(&mut _fetch.#named_field_idents, &_state.#named_field_idents, _table);)*
}
fn update_component_access(state: &Self::State, _access: &mut #path::query::FilteredAccess<#path::component::ComponentId>) {
#( <#field_types>::update_component_access(&state.#named_field_idents, _access); )*
}
fn init_state(world: &mut #path::world::World) -> #state_struct_name #user_ty_generics {
#state_struct_name {
#(#named_field_idents: <#field_types>::init_state(world),)*
}
}
fn get_state(components: &#path::component::Components) -> Option<#state_struct_name #user_ty_generics> {
Some(#state_struct_name {
#(#named_field_idents: <#field_types>::get_state(components)?,)*
})
}
fn matches_component_set(state: &Self::State, _set_contains_id: &impl Fn(#path::component::ComponentId) -> bool) -> bool {
true #(&& <#field_types>::matches_component_set(&state.#named_field_idents, _set_contains_id))*
}
}
}
}
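// A hedged sketch of what `world_query_impl` emits for a hypothetical
// `struct Foo { a: A, b: B }` (ignoring name collision handling):
//
//     struct FooFetch<'__w> {
//         a: <A as WorldQuery>::Fetch<'__w>,
//         b: <B as WorldQuery>::Fetch<'__w>,
//         _world_query_derive_marker: &'__w (),
//     }
//
// plus a `Clone` impl for the fetch struct and a `WorldQuery` impl for `Foo` that
// forwards `init_fetch`, `set_archetype`, `set_table`, `update_component_access`,
// `init_state`, `get_state` and `matches_component_set` to each field in declaration order.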